bigbroda 0.0.7 → 0.1.0.pre

Files changed (45)
  1. checksums.yaml +4 -4
  2. data/Appraisals +15 -0
  3. data/Gemfile +1 -0
  4. data/README.md +39 -21
  5. data/Rakefile +5 -2
  6. data/{google_bigquery.gemspec → bigbroda.gemspec} +2 -2
  7. data/gemfiles/rails_3.gemfile +20 -0
  8. data/gemfiles/rails_4.0.3.gemfile +20 -0
  9. data/gemfiles/rails_4.0.3.gemfile.lock +176 -0
  10. data/gemfiles/rails_4.1.gemfile +20 -0
  11. data/gemfiles/rails_4.1.gemfile.lock +182 -0
  12. data/gemfiles/rails_4.2.gemfile +20 -0
  13. data/gemfiles/rails_4.2.gemfile.lock +202 -0
  14. data/gemfiles/rails_4.gemfile +20 -0
  15. data/gemfiles/rails_4.gemfile.lock +176 -0
  16. data/lib/active_record/connection_adapters/bigquery_adapter.rb +32 -601
  17. data/lib/active_record/connection_adapters/rails_41.rb +607 -0
  18. data/lib/active_record/connection_adapters/rails_42.rb +628 -0
  19. data/lib/{google_bigquery → bigbroda}/auth.rb +3 -3
  20. data/lib/{google_bigquery → bigbroda}/client.rb +3 -3
  21. data/lib/{google_bigquery → bigbroda}/config.rb +1 -1
  22. data/lib/{google_bigquery → bigbroda}/dataset.rb +23 -23
  23. data/lib/{google_bigquery → bigbroda}/engine.rb +4 -4
  24. data/lib/{google_bigquery → bigbroda}/jobs.rb +28 -28
  25. data/lib/bigbroda/project.rb +16 -0
  26. data/lib/{google_bigquery → bigbroda}/railtie.rb +3 -3
  27. data/lib/{google_bigquery → bigbroda}/table.rb +19 -19
  28. data/lib/{google_bigquery → bigbroda}/table_data.rb +7 -7
  29. data/lib/bigbroda/version.rb +3 -0
  30. data/lib/bigbroda.rb +27 -0
  31. data/lib/generators/{google_bigquery → bigbroda}/install/install_generator.rb +2 -2
  32. data/lib/generators/templates/{bigquery.rb.erb → bigbroda.rb.erb} +1 -1
  33. data/spec/dummy/config/application.rb +1 -1
  34. data/spec/functional/adapter/adapter_spec.rb +40 -38
  35. data/spec/functional/auth_spec.rb +3 -3
  36. data/spec/functional/config_spec.rb +5 -5
  37. data/spec/functional/dataset_spec.rb +19 -19
  38. data/spec/functional/project_spec.rb +4 -4
  39. data/spec/functional/table_data_spec.rb +13 -13
  40. data/spec/functional/table_spec.rb +30 -30
  41. data/spec/spec_helper.rb +2 -2
  42. metadata +32 -20
  43. data/lib/google_bigquery/project.rb +0 -16
  44. data/lib/google_bigquery/version.rb +0 -3
  45. data/lib/google_bigquery.rb +0 -27
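The headline change in this release is the rename of the top-level namespace from GoogleBigquery to BigBroda (and of the gem entry point from google_bigquery.rb to bigbroda.rb); every hunk below reflects it. A minimal before/after sketch of a call site, with a placeholder project id:

  # 0.0.7
  GoogleBigquery::Dataset.list("my-project-id")

  # 0.1.0.pre
  BigBroda::Dataset.list("my-project-id")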
data/lib/{google_bigquery → bigbroda}/dataset.rb
@@ -1,50 +1,50 @@
 
- module GoogleBigquery
- class Dataset < GoogleBigquery::Client
+ module BigBroda
+ class Dataset < BigBroda::Client
 
- attr_accessor :options
+ attr_accessor :options
 
  def initialize( opts={})
  super
  end
 
  def self.list(project_id)
- parse_response GoogleBigquery::Auth.client.execute(
- GoogleBigquery::Auth.api.datasets.list,
+ parse_response BigBroda::Auth.client.execute(
+ BigBroda::Auth.api.datasets.list,
  projectId: project_id
  )
  end
 
  def self.get(project_id, dataset_id)
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.datasets.get,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.datasets.get,
  :parameters=> {"projectId"=> project_id, "datasetId"=> dataset_id }
  )
  parse_response(res)
  end
 
  def self.update(project_id, dataset_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.datasets.update,
- :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.datasets.update,
+ :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
  :parameters=> {"projectId"=> project_id, "datasetId"=> dataset_id }
  )
  parse_response(res)
  end
 
  def self.patch(project_id, dataset_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.datasets.update,
- :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.datasets.update,
+ :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
  :parameters=> {"projectId"=> project_id, "datasetId"=> dataset_id }
  )
  parse_response(res)
  end
 
  def self.create(project_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.datasets.insert,
- :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.datasets.insert,
+ :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
  :parameters=> {"projectId"=> project_id }
  )
  parse_response(res)
@@ -55,18 +55,18 @@ module GoogleBigquery
 
  def self.delete(project_id, dataset_id, body={})
 
- tables = GoogleBigquery::Table.list(project_id, dataset_id)["tables"]
-
+ tables = BigBroda::Table.list(project_id, dataset_id)["tables"]
+
  unless tables.nil? or tables.empty?
- tables.map!{|o| o["tableReference"]["tableId"]}
+ tables.map!{|o| o["tableReference"]["tableId"]}
  tables.each do |table_id|
- GoogleBigquery::Table.delete(project_id, dataset_id, table_id)
+ BigBroda::Table.delete(project_id, dataset_id, table_id)
  end
  end
 
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.datasets.delete,
- #:body_object=> {"deleteContents"=> true},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.datasets.delete,
+ #:body_object=> {"deleteContents"=> true},
  :parameters=> {"projectId"=> project_id, "datasetId" => dataset_id }
  )
  res.status == 204 ? true : parse_response(res)
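Beyond the namespace swap, Dataset keeps the same class-method API (list, get, create, update, patch, delete). A minimal usage sketch against the renamed class, with placeholder ids; the request bodies follow the shapes already visible in this file and in the adapter spec further down:

  project = "1234567890"   # placeholder project id
  BigBroda::Dataset.create(project, {"datasetReference" => {"datasetId" => "my_dataset"}})
  BigBroda::Dataset.list(project)                  # parsed datasets.list response
  BigBroda::Dataset.delete(project, "my_dataset")  # deletes the dataset's tables first, then the dataset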
data/lib/{google_bigquery → bigbroda}/engine.rb
@@ -2,10 +2,10 @@
 
  require "active_record/connection_adapters/bigquery_adapter.rb"
 
- module GoogleBigquery
+ module BigBroda
  class Engine < ::Rails::Engine
-
- isolate_namespace GoogleBigquery
+
+ isolate_namespace BigBroda
  #config.generators do |g|
  # g.test_framework :rspec,
  # :fixture_replacement => :factory_girl ,
@@ -13,7 +13,7 @@ module GoogleBigquery
  # g.integration_tool :rspec
  #end
 
- #initializer "require GoogleBigquery" do
+ #initializer "require BigBroda" do
  #end
 
  end
data/lib/{google_bigquery → bigbroda}/jobs.rb
@@ -1,16 +1,16 @@
- module GoogleBigquery
- class Jobs < GoogleBigquery::Client
+ module BigBroda
+ class Jobs < BigBroda::Client
 
  def initialize(client=nil, opts={})
  @client = client
  end
 
  #query
- #Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.
+ #Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout.
  def self.query(project_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.jobs.query,
- :body_object=> body,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.jobs.query,
+ :body_object=> body,
  :parameters=> {"projectId"=> project_id}
  )
  parse_response(res)
@@ -19,8 +19,8 @@ module GoogleBigquery
 
  #Retrieves the specified job by ID.
  def self.get(project_id , job_id)
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.jobs.get,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.jobs.get,
  :parameters=> {"projectId"=> project_id, "jobId"=>job_id}
  )
  parse_response(res)
@@ -28,8 +28,8 @@ module GoogleBigquery
 
  #Retrieves the results of a query job.
  def self.getQueryResults(project_id , job_id, params={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.jobs.get_query_results,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.jobs.get_query_results,
  :parameters=> {"projectId"=> project_id, "jobId"=>job_id}.merge(params)
  )
  parse_response(res)
@@ -37,9 +37,9 @@ module GoogleBigquery
 
  #Starts a new asynchronous job.
  def self.insert(project_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.jobs.insert,
- :body_object=> body,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.jobs.insert,
+ :body_object=> body,
  :parameters=> {"projectId"=> project_id}
  )
  parse_response(res)
@@ -47,8 +47,8 @@ module GoogleBigquery
 
  #Lists all the Jobs in the specified project that were started by the user.
  def self.list(project_id, params={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.jobs.list,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.jobs.list,
  :parameters=> {"projectId"=> project_id}.merge(params)
  )
  parse_response(res)
@@ -71,16 +71,16 @@ module GoogleBigquery
  }
  }
 
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.jobs.insert,
- :body_object=> body,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.jobs.insert,
+ :body_object=> body,
  :parameters=> {"projectId"=> project_id}
  )
-
+
  job_id = JSON.parse(res.body)["jobReference"]["jobId"]
  puts 'Waiting for export to complete..'
 
- loop do
+ loop do
  status = JSON.parse(self.get(project_id, job_id).body)
 
  if 'DONE' == status['status']['state']
@@ -104,8 +104,8 @@ module GoogleBigquery
  'load'=> {
  'sourceFormat' => "NEWLINE_DELIMITED_JSON",
  'sourceUri' => sources.first,
- 'sourceUris' => sources,
-
+ 'sourceUris' => sources,
+
  'destinationTable'=> {
  'projectId'=> project_id,
  'datasetId'=> dataset_id,
@@ -114,16 +114,16 @@ module GoogleBigquery
  }
  }
  }
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.jobs.insert,
- :body_object=> body,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.jobs.insert,
+ :body_object=> body,
  :parameters=> {"projectId"=> project_id}
  )
  #binding.pry
  job_id = JSON.parse(res.body)["jobReference"]["jobId"]
  puts 'Waiting for import to complete..'
-
- loop do
+
+ loop do
  status = JSON.parse(self.get(project_id, job_id).body)
 
  if 'DONE' == status['status']['state']
@@ -149,7 +149,7 @@ module GoogleBigquery
  private
 
  def self.build_body_object(options)
- project_id = options[:project_id]
+ project_id = options[:project_id]
  dataset_id = options[:dataset_id]
  table_id = options[:table_id]
  bucket_location = options[:bucket_location]
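Jobs is likewise a straight rename; query, get, getQueryResults, insert and list keep their signatures. A hedged sketch of a synchronous query, assuming the standard jobs.query request body ({"query" => ...}) and response key ("rows"), neither of which is shown in these hunks:

  project = "1234567890"   # placeholder project id
  result  = BigBroda::Jobs.query(project, {"query" => "SELECT name, age FROM [my_dataset.users] LIMIT 10"})
  result["rows"]           # assumed key in the parsed query response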
data/lib/bigbroda/project.rb ADDED
@@ -0,0 +1,16 @@
+
+ module BigBroda
+ class Project < BigBroda::Client
+
+ attr_accessor :options
+
+ def initialize( opts={})
+ super
+ end
+
+ def self.list
+ parse_response BigBroda::Auth.client.execute( BigBroda::Auth.api.projects.list)
+ end
+
+ end
+ end
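Project is a small new wrapper with a single class method. Usage is a one-liner; the shape of the parsed response (a "projects" array) is an assumption based on the projects.list endpoint, not on anything in this diff:

  BigBroda::Project.list["projects"]   # assumed response key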
data/lib/{google_bigquery → bigbroda}/railtie.rb
@@ -1,10 +1,10 @@
  # encoding: UTF-8
 
- module GoogleBigquery
+ module BigBroda
  class Railtie < ::Rails::Railtie
  config.before_configuration do
  require "#{Rails.root}/config/initializers/bigquery"
- GoogleBigquery::Auth.new.authorize
+ BigBroda::Auth.new.authorize
  # if config.action_view.javascript_expansions
  # config.action_view.javascript_expansions[:high_charts] |= %w(highcharts exporting)
  # end
@@ -19,7 +19,7 @@ module GoogleBigquery
  rake_tasks do
  require "active_record/base"
  require "active_record/tasks/bigquery_database_tasks"
-
+
  #ActiveRecord::Tasks::DatabaseTasks.seed_loader = Rails.application
  #ActiveRecord::Tasks::DatabaseTasks.env = Rails.env
 
data/lib/{google_bigquery → bigbroda}/table.rb
@@ -1,59 +1,59 @@
 
- module GoogleBigquery
- class Table < GoogleBigquery::Client
+ module BigBroda
+ class Table < BigBroda::Client
 
- attr_accessor :options
+ attr_accessor :options
 
  def initialize( opts={})
  super
  end
 
  def self.list(project_id, dataset_id)
- parse_response GoogleBigquery::Auth.client.execute(
- GoogleBigquery::Auth.api.tables.list,
+ parse_response BigBroda::Auth.client.execute(
+ BigBroda::Auth.api.tables.list,
  projectId: project_id, datasetId: dataset_id
  )
  end
 
  def self.get(project_id, dataset_id, table_id)
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.tables.get,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.tables.get,
  :parameters=> {"projectId"=> project_id, "datasetId"=> dataset_id, "tableId"=> table_id }
  )
  parse_response(res)
  end
 
  def self.update(project_id, dataset_id, table_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.tables.update,
- :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.tables.update,
+ :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
  :parameters=> {"projectId"=> project_id, "datasetId" => dataset_id, "tableId"=> table_id }
  )
  parse_response(res)
  end
 
  def self.patch(project_id, dataset_id, table_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.tables.update,
- :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.tables.update,
+ :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
  :parameters=> {"projectId"=> project_id, "datasetId" => dataset_id, "tableId"=> table_id }
  )
  parse_response(res)
  end
 
  def self.create(project_id, dataset_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.tables.insert,
- :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.tables.insert,
+ :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
  :parameters=> {"projectId"=> project_id, "datasetId"=> dataset_id }
  )
  parse_response(res)
  end
 
  def self.delete(project_id, dataset_id, table_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.tables.delete,
- #:body_object=> body, #{"deleteContents"=> false},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.tables.delete,
+ #:body_object=> body, #{"deleteContents"=> false},
  :parameters=> {"projectId"=> project_id, "datasetId" => dataset_id, "tableId"=> table_id }
  )
  res.status == 204 ? true : parse_response(res)
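Table mirrors Dataset: the same CRUD class methods under the new namespace. A sketch of creating a table with an explicit schema; ids are placeholders, and the schema envelope here uses the plain tables.insert shape ("schema" => {"fields" => [...]}), whereas the adapter spec below wraps it slightly differently:

  project = "1234567890"   # placeholder project id
  body = {
    "tableReference" => {"projectId" => project, "datasetId" => "my_dataset", "tableId" => "users"},
    "schema" => {"fields" => [
      {"name" => "id",   "type" => "STRING"},
      {"name" => "name", "type" => "STRING", "mode" => "REQUIRED"}
    ]}
  }
  BigBroda::Table.create(project, "my_dataset", body)
  BigBroda::Table.list(project, "my_dataset")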
data/lib/{google_bigquery → bigbroda}/table_data.rb
@@ -1,19 +1,19 @@
 
- module GoogleBigquery
- class TableData < GoogleBigquery::Client
+ module BigBroda
+ class TableData < BigBroda::Client
 
  def self.create(project_id, dataset_id, table_id, body={})
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.tabledata.insert_all,
- :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.tabledata.insert_all,
+ :body_object=> body , #{"datasetReference"=> {"datasetId" =>"whoa"}},
  :parameters=> {"projectId"=> project_id, "datasetId"=> dataset_id, "tableId"=>table_id }
  )
  parse_response(res)
  end
 
  def self.list(project_id, dataset_id, table_id)
- res = GoogleBigquery::Auth.client.execute(
- :api_method=> GoogleBigquery::Auth.api.tabledata.list,
+ res = BigBroda::Auth.client.execute(
+ :api_method=> BigBroda::Auth.api.tabledata.list,
  :parameters=> {"projectId"=> project_id, "datasetId"=> dataset_id, "tableId"=>table_id }
  )
  parse_response(res)
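TableData wraps the streaming tabledata.insertAll and tabledata.list calls. A hedged sketch of a streaming insert; the outer "rows"/"insertId" envelope appears in the adapter spec below, while the "json" payload key is assumed from the insertAll request shape:

  project = "1234567890"   # placeholder project id
  rows = {"rows" => [
    {"insertId" => Time.now.to_i.to_s,
     "json"     => {"id" => "1", "name" => "ALF", "age" => 230}}
  ]}
  BigBroda::TableData.create(project, "my_dataset", "users", rows)
  BigBroda::TableData.list(project, "my_dataset", "users")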
data/lib/bigbroda/version.rb ADDED
@@ -0,0 +1,3 @@
+ module BigBroda
+ VERSION = "0.1.0.pre"
+ end
data/lib/bigbroda.rb ADDED
@@ -0,0 +1,27 @@
+ $:.unshift(File.dirname(__FILE__)) unless $:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
+
+ require "google/api_client"
+ require "active_support"
+
+ module BigBroda
+
+ autoload :VERSION, 'bigbroda/version.rb'
+ autoload :Config, 'bigbroda/config.rb'
+ autoload :Auth, 'bigbroda/auth.rb'
+ autoload :Client, 'bigbroda/client.rb'
+ autoload :Project, 'bigbroda/project.rb'
+ autoload :Dataset, 'bigbroda/dataset.rb'
+ autoload :Table, 'bigbroda/table.rb'
+ autoload :TableData,'bigbroda/table_data.rb'
+ autoload :Jobs, 'bigbroda/jobs.rb'
+
+ if defined?(::Rails::Railtie)
+ autoload :Rails, 'bigbroda/engine.rb' if ::Rails.version >= '3.1'
+ end
+
+ if defined?(::Rails::Railtie)
+ autoload :Rails, 'bigbroda/engine.rb' if ::Rails.version >= '3.1'
+ require File.join(File.dirname(__FILE__), *%w[bigbroda railtie]) if ::Rails.version.to_s >= '3.1'
+
+ end
+ end
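The new entry point autoloads each constant on first use, so a plain Ruby script only needs the one require; the version constant matches this release:

  require "bigbroda"
  BigBroda::VERSION   # => "0.1.0.pre"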
data/lib/generators/{google_bigquery → bigbroda}/install/install_generator.rb
@@ -1,6 +1,6 @@
  # encoding: UTF-8
 
- module GoogleBigquery
+ module BigBroda
  class InstallGenerator < ::Rails::Generators::Base
 
  source_root File.expand_path("../../../templates", __FILE__)
@@ -10,7 +10,7 @@ module GoogleBigquery
 
  def copy_initializer
  say_status("installing", "BigQuery", :green)
- copy_file "bigquery.rb.erb", "config/initializers/bigquery.rb"
+ copy_file "bigquery.rb.erb", "config/initializers/bigbroda.rb"
  end
 
  def show_readme
data/lib/generators/templates/{bigquery.rb.erb → bigbroda.rb.erb}
@@ -1,4 +1,4 @@
- GoogleBigquery::Config.setup do |config|
+ BigBroda::Config.setup do |config|
  #config.pass_phrase = "notasecret"
  #config.key_file = Rails.root + "/config/XXXXkey_file.p12"
  #config.scope = "https://www.googleapis.com/auth/bigquery"
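Taken together, the generator and template give a Rails app its initializer; the same block can also be written by hand. A sketch based on the commented-out template keys above, with a placeholder key path, plus the boot-time authorization the railtie performs:

  # config/initializers/bigbroda.rb
  BigBroda::Config.setup do |config|
    config.pass_phrase = "notasecret"
    config.key_file    = Rails.root + "/config/your_service_account_key.p12"   # placeholder path
    config.scope       = "https://www.googleapis.com/auth/bigquery"
  end

  BigBroda::Auth.new.authorize   # the railtie runs this during config.before_configuration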
data/spec/dummy/config/application.rb
@@ -3,7 +3,7 @@ require File.expand_path('../boot', __FILE__)
  require 'rails/all'
 
  Bundler.require(*Rails.groups)
- require "google_bigquery"
+ require "big_broda"
 
  module Dummy
  class Application < Rails::Application
data/spec/functional/adapter/adapter_spec.rb
@@ -46,21 +46,21 @@ class RemovePublishedToUser < ActiveRecord::Migration
  end
  end
 
- class User < ActiveRecord::Base
+ class User < ActiveRecord::Base
  validates :name, presence: true
  has_many :posts
 
  scope :admins , ->{where(admin: true)}
  end
 
- class Post < ActiveRecord::Base
+ class Post < ActiveRecord::Base
  validates :title, presence: true
  belongs_to :user
  end
 
  def create_tables
- @table = GoogleBigquery::Table.create(@project, @name, @table_body )
-
+ @table = BigBroda::Table.create(@project, @name, @table_body )
+
  @rows = {"rows"=> [
  {
  "insertId"=> Time.now.to_i.to_s,
@@ -69,8 +69,8 @@ def create_tables
  }
  }
  ]}
-
- GoogleBigquery::TableData.create(@project, @name, @table_name , @rows )
+
+ BigBroda::TableData.create(@project, @name, @table_name , @rows )
  end
 
  describe "ActiveRecord Adapter", :vcr => { :allow_unused_http_interactions => true } do
@@ -80,11 +80,11 @@ describe "ActiveRecord Adapter", :vcr => { :allow_unused_http_interactions => tr
  let(:add_col_migration) { AddPublishedToUser.new}
  let(:remove_col_migration) { RemovePublishedToUser.new}
 
- before :all do
+ before :all do
 
  VCR.use_cassette("ActiveRecord_Adapter/authorize_config") do
  config_setup
- @auth = GoogleBigquery::Auth.new
+ @auth = BigBroda::Auth.new
  @auth.authorize
  @name = "rspec_schema"
  @project = config_options["email"].match(/(\d*)/)[0]
@@ -93,8 +93,8 @@ describe "ActiveRecord Adapter", :vcr => { :allow_unused_http_interactions => tr
  @table_body = { "tableReference"=> {
  "projectId"=> @project,
  "datasetId"=> @name,
- "tableId"=> @table_name},
- "schema"=> [:fields=>[
+ "tableId"=> @table_name},
+ "schema"=> [:fields=>[
  {:name=> "id", :type=> "string"},
  {:name=> "name", :type=> "string", :mode => "REQUIRED"},
  {:name=> "age", :type=> "integer"},
@@ -105,28 +105,28 @@ describe "ActiveRecord Adapter", :vcr => { :allow_unused_http_interactions => tr
  }
 
  ActiveRecord::Base.establish_connection(
- :adapter => 'bigquery',
+ :adapter => 'bigquery',
  :project => @project,
  :database => @name
  )
  end
  end
 
- before :each do
+ before :each do
  VCR.use_cassette("ActiveRecord_Adapter/create_each") do
- GoogleBigquery::Dataset.create(@project,
+ BigBroda::Dataset.create(@project,
  {"datasetReference"=> { "datasetId" => @name }} )
- create_tables
+ create_tables
  end
  end
 
- after :each do
+ after :each do
  VCR.use_cassette("ActiveRecord_Adapter/after_each") do
- GoogleBigquery::Dataset.delete(@project, @name)
+ BigBroda::Dataset.delete(@project, @name)
  end
  end
 
- describe "adapter" do
+ describe "adapter" do
 
  it "simple quering", :vcr do
  #sleep 50
@@ -137,6 +137,7 @@ describe "ActiveRecord Adapter", :vcr => { :allow_unused_http_interactions => tr
  #User.create(name: "frank capra")
  #User.find_by(id: "some-id-1393025921")
  #User.where("id =? and name= ?", "some-id-1393025921", "User 2014-02-21 20:38:41 -0300")
+
  expect(User.count).to be 1
  expect(User.first).to be_an_instance_of User
  expect(User.all.size).to be 1
@@ -144,15 +145,15 @@ describe "ActiveRecord Adapter", :vcr => { :allow_unused_http_interactions => tr
  end
 
  describe "migrations" do
-
- before :each do
+
+ before :each do
  VCR.use_cassette("ActiveRecord_Adapter/after_each") do
- GoogleBigquery::Table.delete(@project, @name, "users")
- migration.up; User.reset_column_information
+ BigBroda::Table.delete(@project, @name, "users")
+ migration.up; User.reset_column_information
  end
  end
-
- describe '#up', vcr: {:record => :new_episodes} do
+
+ describe '#up', vcr: {:record => :new_episodes} do
  it 'adds the created_at & updated_at column', :vcr do
  User.columns_hash.should have_key('created_at')
  User.columns_hash.should have_key('updated_at')
@@ -160,43 +161,44 @@ describe "ActiveRecord Adapter", :vcr => { :allow_unused_http_interactions => tr
  end
 
  describe '#down', vcr: {:record => :new_episodes} do
- before {
- migration.down; User.reset_column_information
+ before {
+ migration.down; User.reset_column_information
  }
-
+
  it 'adds the email_at_utc_hour column' do
  User.should_not be_table_exists
  end
 
  end
 
- #describe "add column", vcr: {:record => :new_episodes} do
- # before {
- # add_col_migration.change; User.reset_column_information
+ #describe "add column", vcr: {:record => :new_episodes} do
+ # before {
+ # add_col_migration.change; User.reset_column_information
  # }
-
+
  # it 'adds published column' do
  # #binding.pry
  # User.columns_hash.should have_key('published')
  # end
  #end
 
- describe "remove column", vcr: {:record => :new_episodes} do
- before {
- add_col_migration.change; User.reset_column_information
+ describe "remove column", vcr: {:record => :new_episodes} do
+ before {
+ add_col_migration.change; User.reset_column_information
  }
-
+
  it 'should raise error' do
  expect{remove_col_migration.change}.to raise_error
  end
  end
 
- describe "associations", vcr: {:record => :new_episodes} do
- before {
- posts_migration.up; Post.reset_column_information
+ describe "associations", vcr: {:record => :new_episodes} do
+ before {
+ posts_migration.up; Post.reset_column_information
  }
 
- it "users_posts" do
+ it "users_posts" do
+ #binding.pry
  User.create(name: "ALF")
  #sleep 50
  post = User.first.posts.create(title: "yeah")
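The adapter spec doubles as usage documentation for the ActiveRecord side. A condensed sketch of the flow it exercises, with placeholder project and dataset names:

  ActiveRecord::Base.establish_connection(
    :adapter  => 'bigquery',
    :project  => "1234567890",     # placeholder project id
    :database => "my_dataset"
  )

  class User < ActiveRecord::Base
    validates :name, presence: true
  end

  User.create(name: "ALF")
  User.count    # queries the BigQuery dataset through the adapter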
@@ -3,15 +3,15 @@ require File.expand_path(File.dirname(__FILE__) + '../../spec_helper')
  describe "Config" do
  before(:all) do
  config_setup
- @auth = GoogleBigquery::Auth.new
+ @auth = BigBroda::Auth.new
  end
 
  it "authorization object" do
  VCR.use_cassette('auth') do
  @auth.authorize
  @auth.api.class.should be Google::APIClient::API
- GoogleBigquery::Auth.api.class.should be Google::APIClient::API
- GoogleBigquery::Auth.client.class.should be Google::APIClient
+ BigBroda::Auth.api.class.should be Google::APIClient::API
+ BigBroda::Auth.client.class.should be Google::APIClient
  end
  end