big_ml 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. data/CHANGELOG.md +5 -0
  2. data/README.md +55 -18
  3. data/big_ml.gemspec +1 -0
  4. data/lib/big_ml/base.rb +21 -11
  5. data/lib/big_ml/dataset.rb +10 -4
  6. data/lib/big_ml/model.rb +16 -10
  7. data/lib/big_ml/prediction.rb +6 -4
  8. data/lib/big_ml/source.rb +10 -4
  9. data/lib/big_ml/{authenticable.rb → util/authenticable.rb} +0 -0
  10. data/lib/big_ml/util/client.rb +26 -0
  11. data/lib/big_ml/util/config.rb +32 -0
  12. data/lib/big_ml/{request.rb → util/request.rb} +0 -0
  13. data/lib/big_ml/version.rb +1 -1
  14. data/lib/big_ml.rb +8 -4
  15. data/spec/integration/dataset_spec.rb +17 -0
  16. data/spec/integration/model_spec.rb +17 -0
  17. data/spec/integration/prediction_spec.rb +11 -0
  18. data/spec/integration/source_spec.rb +19 -6
  19. data/spec/units/client_spec.rb +5 -5
  20. data/spec/vcr_cassettes/BigML_Dataset/no_dataset/_all/must_be_empty.yml +53 -17
  21. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/can_be_converted_in_a_model.yml +232 -0
  22. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/must_be_able_to_be_deleted_using_the_destroy_method.yml +288 -0
  23. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/must_be_able_to_be_find_using_the_reference.yml +49 -49
  24. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/must_be_able_to_remove_the_dataset.yml +41 -77
  25. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/must_be_able_to_update_the_name.yml +57 -57
  26. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/must_be_able_to_update_the_name_from_the_instance.yml +360 -0
  27. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/must_have_only_one_item.yml +44 -44
  28. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/must_have_the_same_file_name.yml +44 -44
  29. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/was_created_successfully.yml +22 -22
  30. data/spec/vcr_cassettes/BigML_Model/no_model/_all/must_be_empty.yml +133 -21
  31. data/spec/vcr_cassettes/BigML_Model/one_model/can_be_converted_in_a_prediction.yml +346 -0
  32. data/spec/vcr_cassettes/BigML_Model/one_model/{must_have_the_same_file_name.yml → must_be_able_to_be_deleted_using_the_destroy_method.yml} +90 -107
  33. data/spec/vcr_cassettes/BigML_Model/one_model/must_be_able_to_be_find_using_the_reference.yml +172 -161
  34. data/spec/vcr_cassettes/BigML_Model/one_model/must_be_able_to_remove_the_model.yml +194 -70
  35. data/spec/vcr_cassettes/BigML_Model/one_model/must_be_able_to_update_the_name.yml +225 -243
  36. data/spec/vcr_cassettes/BigML_Model/one_model/must_be_able_to_update_the_name_from_the_instance.yml +686 -0
  37. data/spec/vcr_cassettes/BigML_Model/one_model/must_have_only_one_item.yml +169 -52
  38. data/spec/vcr_cassettes/BigML_Model/one_model/must_have_the_same_size.yml +91 -90
  39. data/spec/vcr_cassettes/BigML_Model/one_model/was_created_successfully.yml +35 -114
  40. data/spec/vcr_cassettes/BigML_Prediction/no_prediction/_all/must_be_empty.yml +141 -89
  41. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/must_be_able_to_be_deleted_using_the_destroy_method.yml +474 -0
  42. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/must_be_able_to_be_find_using_the_reference.yml +88 -88
  43. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/must_be_able_to_remove_the_prediction.yml +88 -88
  44. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/must_be_able_to_update_the_name.yml +97 -97
  45. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/must_be_able_to_update_the_name_from_the_instance.yml +545 -0
  46. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/must_have_only_one_item.yml +90 -90
  47. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/must_have_the_same_name.yml +161 -78
  48. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/was_created_successfully.yml +45 -45
  49. data/spec/vcr_cassettes/BigML_Source/no_source/_all/must_be_empty.yml +12 -42
  50. data/spec/vcr_cassettes/BigML_Source/one_source/can_be_converted_in_a_dataset.yml +119 -0
  51. data/spec/vcr_cassettes/BigML_Source/one_source/must_be_able_to_be_deleted_using_the_destroy_method.yml +135 -0
  52. data/spec/vcr_cassettes/BigML_Source/one_source/must_be_able_to_be_find_using_the_reference.yml +22 -52
  53. data/spec/vcr_cassettes/BigML_Source/one_source/must_be_able_to_remove_the_source.yml +43 -30
  54. data/spec/vcr_cassettes/BigML_Source/one_source/must_be_able_to_update_the_name.yml +38 -68
  55. data/spec/vcr_cassettes/BigML_Source/one_source/must_be_able_to_update_the_name_from_the_instance.yml +249 -0
  56. data/spec/vcr_cassettes/BigML_Source/one_source/must_have_only_one_item.yml +24 -54
  57. data/spec/vcr_cassettes/BigML_Source/one_source/must_have_the_same_file_name.yml +21 -51
  58. data/spec/vcr_cassettes/BigML_Source/one_source/was_created_successfully.yml +8 -38
  59. metadata +50 -9
  60. data/.rvmrc +0 -1
  61. data/lib/big_ml/client.rb +0 -29
  62. data/lib/big_ml/config.rb +0 -30
data/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
  # Changelog
 
+ ## 0.1.1
+ - Casting method to_* added
+ - Update and destroy methods in the instances
+ - README updated
+
  ## 0.1.0
  - BigML::Authenticable
  - BigML::Source
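For context, the 0.1.1 additions combine as follows. This is a minimal sketch based on the README synopsis and the integration specs further down in this diff; the file path and resource names are placeholders:

```ruby
require 'big_ml'

BigML.configure do |c|
  c.username = 'foo'
  c.api_key  = 'bar'
end

# New in 0.1.1: cast a resource into the next one in the chain...
source  = BigML::Source.create("fixtures/iris.csv")
dataset = source.to_dataset

# ...and update or destroy it straight from the instance.
dataset.update(:name => 'iris dataset')  # the specs expect HTTP 202
dataset.destroy                          # delegates to BigML::Dataset.delete
```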
data/README.md CHANGED
@@ -1,44 +1,81 @@
  # BigML
 
- A Ruby wrapper for the [BigML REST API](https://bigml.com/developers)
+ A Ruby wrapper for the [BigML REST API](https://bigml.com/developers), provides access to sources, datasets, models, and predictions. You can create, retrieve, update and delete
 
- ## Compile into system gems
+ [![Build Status](https://secure.travis-ci.org/vigosan/big_ml.png?branch=master)](http://travis-ci.org/vigosan/big_ml)
+
+ ## Synopsis
+
+ ```ruby
+ require 'rubygems'
+ require 'big_ml'
 
- rake build; rake install
+ BigML.configure do |c|
+ c.username = 'foo'
+ c.api_key = 'bar'
+ end
+
+ # easy creation of sources
+ file_path = "fixtures/iris.csv"
+ source = BigML::Source.create(file_path)
+
+ # Are you looking for old dataset?
+ all_datasets = BigML::Dataset.all
+
+ # or just want to create one?
+ dataset = source.to_dataset
+
+ # find a concrete one
+ model = BigML::Model.find("4fe8868a035d07682f002891")
+
+ # create you predictions using parameters...
+ prediction = BigML::Prediction.create(model.resource, { :input_data => { "000001" => 3 }})
+ prediction.destroy
+
+ # you can destroy everything!
+ BigML::Model.delete("4fe8868a035d07682f002891")
+
+ ```
 
  ## Installation
 
+ ### Bundler
+
  Add this line to your application's Gemfile:
 
+ source 'https://rubygems.org'
  gem 'big_ml'
 
- And then execute:
+ And then install the new dependencies with:
 
- $ bundle
+ $ bundle install
 
- ## Usage
+ ### Rubygems
 
- ```ruby
- require 'big_ml'
+ Install the gem with rubygem in your system:
 
- BigML.configure do |c|
- c.username = 'foo'
- c.api_key = 'bar'
- end
+ $ gem install big_ml
+
+ ### Dependencies
+
+ - httparty
 
- source = BigML::Source.create("spec/fixtures/iris.csv")
- dataset = BigML::Dataset.create(source.resource)
- model = BigML::Model.create(dataset.resource)
- prediction = BigML::Prediction.create(model.resource, { :input_data => { "000001" => 3 }})
- ```
 
  ## Ruby Interpreter Compatibility
+
  This library has been tested on the following ruby interpreters:
 
  * MRI 1.9.2
  * MRI 1.9.3
 
- [![Build Status](https://secure.travis-ci.org/vigosan/big_ml.png?branch=master)](http://travis-ci.org/vigosan/big_ml)
+ ## Running the tests
+
+ Download and run the test suit:
+
+ $ git clone git@github.com:vigosan/big_ml.git
+ $ cd big_ml
+ $ bundle install
+ $ rspec spec
 
  ## Contributing
 
data/big_ml.gemspec CHANGED
@@ -9,6 +9,7 @@ Gem::Specification.new do |gem|
  gem.homepage = "https://bigml.com"
 
  gem.add_dependency "httmultiparty"
+ gem.add_dependency "json"
  gem.add_development_dependency "rake"
  gem.add_development_dependency "rspec"
  gem.add_development_dependency "vcr"
data/lib/big_ml/base.rb CHANGED
@@ -1,17 +1,27 @@
+ require 'big_ml/util/client'
+
  module BigML
  class Base
- attr_accessor :attrs
+ attr_accessor :attrs
+
+ def initialize(attrs = {})
+ @attrs = attrs.dup
+ @attrs.each { |key, value|
+ instance_variable_set("@#{key}".to_sym, value)
+ }
+ end
 
- def initialize(attrs = {})
- @attrs = attrs.dup
- @attrs.each { |key, value|
- instance_variable_set("@#{key}".to_sym, value)
- }
- end
+ def id
+ resource.split('/').last
+ end
 
- def id
- resource.split('/').last
- end
+ def destroy
+ self.class.delete(id)
+ end
+
+ def update(options)
+ self.class.update(id, options)
+ end
 
  class << self
  def all(options = {})
@@ -40,7 +50,7 @@ module BigML
  private
 
  def client
- @client ||= Client.new
+ @client ||= Util::Client.new
  end
 
  def resource_name
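The new instance-level helpers in Base simply delegate to the existing class-level API, so every subclass (Source, Dataset, Model, Prediction) picks them up. A rough illustration of that delegation, assuming a model fetched by the id used in the README:

```ruby
model = BigML::Model.find("4fe8868a035d07682f002891")

model.update(:name => 'new name')  # same as BigML::Model.update(model.id, :name => 'new name')
model.destroy                      # same as BigML::Model.delete(model.id)
```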
data/lib/big_ml/dataset.rb CHANGED
@@ -1,3 +1,5 @@
+ require 'big_ml/base'
+
  module BigML
  class Dataset < Base
  DATASET_PROPERTIES = [
@@ -5,15 +7,19 @@ module BigML
  :description, :fields, :locale, :name, :number_of_models,
  :number_of_predictions, :private, :resource, :rows, :size,
  :source, :source_status, :status, :tags, :updated
- ]
+ ]
+
+ attr_reader *DATASET_PROPERTIES
 
- attr_reader *DATASET_PROPERTIES
+ def to_model
+ Model.create(resource)
+ end
 
- class << self
+ class << self
  def create(source, options = {})
  response = client.post("/#{resource_name}", options, { :source => source })
  self.new(response) if response.success?
  end
- end
+ end
  end
  end
data/lib/big_ml/model.rb CHANGED
@@ -1,21 +1,27 @@
+ require 'big_ml/base'
+
  module BigML
  class Model < Base
  MODEL_PROPERTIES = [
- :category, :code, :columns, :created, :credits,
- :dataset, :dataset_status, :description, :holdout,
- :input_fields, :locale, :max_columns, :max_rows, :model,
- :name, :number_of_predictions, :objective_fields, :private,
- :range, :resource, :rows, :size, :source, :source_status,
- :status, :tags, :updated
- ]
+ :category, :code, :columns, :created, :credits,
+ :dataset, :dataset_status, :description, :holdout,
+ :input_fields, :locale, :max_columns, :max_rows, :model,
+ :name, :number_of_predictions, :objective_fields, :private,
+ :range, :resource, :rows, :size, :source, :source_status,
+ :status, :tags, :updated
+ ]
+
+ attr_reader *MODEL_PROPERTIES
 
- attr_reader *MODEL_PROPERTIES
+ def to_prediction(options)
+ Prediction.create(resource, options)
+ end
 
- class << self
+ class << self
  def create(dataset, options = {})
  response = client.post("/#{resource_name}", options, { :dataset => dataset })
  self.new(response) if response.success?
  end
- end
+ end
  end
  end
data/lib/big_ml/prediction.rb CHANGED
@@ -1,3 +1,5 @@
+ require 'big_ml/base'
+
  module BigML
  class Prediction < Base
  PREDICTION_PROPERTIES = [
@@ -5,15 +7,15 @@ module BigML
  :description, :fields, :input_data, :model, :model_status, :name,
  :objective_fields, :prediction, :prediction_path, :private, :resource,
  :source, :source_status, :status, :tags, :updated
- ]
+ ]
 
- attr_reader *PREDICTION_PROPERTIES
+ attr_reader *PREDICTION_PROPERTIES
 
- class << self
+ class << self
  def create(model, options = {})
  response = client.post("/#{resource_name}", {}, { :model => model }.merge!(options))
  self.new(response) if response.success?
  end
- end
+ end
  end
  end
data/lib/big_ml/source.rb CHANGED
@@ -1,18 +1,24 @@
+ require 'big_ml/base'
+
  module BigML
  class Source < Base
  SOURCE_PROPERTIES = [
  :code, :content_type, :created, :credits, :fields, :file_name, :md5,
  :name, :number_of_datasets, :number_of_models, :number_of_predictions,
  :private, :resource, :size, :source_parser, :status, :type, :updated
- ]
+ ]
+
+ attr_reader *SOURCE_PROPERTIES
 
- attr_reader *SOURCE_PROPERTIES
+ def to_dataset
+ Dataset.create(resource)
+ end
 
- class << self
+ class << self
  def create(file, options = {})
  response = client.post("/#{resource_name}", options.merge(:multipart => true, :file => File.new(file)))
  self.new(response) if response.success?
  end
- end
+ end
  end
  end
data/lib/big_ml/util/client.rb ADDED
@@ -0,0 +1,26 @@
+ require 'httmultiparty'
+ require 'big_ml/util/authenticable'
+ require 'big_ml/util/config'
+ require 'big_ml/util/request'
+
+ module BigML
+ module Util
+ class Client
+ include HTTMultiParty
+ include Authenticable
+ include Request
+ format :json
+
+ base_uri Config::BIGML_ENDPOINT
+
+ attr_accessor *Config::VALID_OPTIONS_KEYS
+
+ def initialize(attrs={})
+ attrs = BigML.options.merge(attrs)
+ Config::VALID_OPTIONS_KEYS.each { |key|
+ instance_variable_set("@#{key}".to_sym, attrs[key])
+ }
+ end
+ end
+ end
+ end
data/lib/big_ml/util/config.rb ADDED
@@ -0,0 +1,32 @@
+ module BigML
+ module Util
+ module Config
+ # The endpoint that will be used to connect if none is set
+ BIGML_ENDPOINT = "https://bigml.io/andromeda"
+ # The username if none is set
+ DEFAULT_BIGML_USERNAME = nil
+ # The api key if none is set
+ DEFAULT_BIGML_API_KEY = nil
+ # An array of valid keys in the options hash when configuring a {BigML::Client}
+ VALID_OPTIONS_KEYS = [:username, :api_key]
+
+ attr_accessor *VALID_OPTIONS_KEYS
+
+ def configure
+ yield self
+ self
+ end
+
+ def options
+ options = {}
+ VALID_OPTIONS_KEYS.each{ |k| options[k] = send(k) }
+ options
+ end
+
+ def reset
+ self.username = DEFAULT_BIGML_USERNAME
+ self.api_key = DEFAULT_BIGML_API_KEY
+ end
+ end
+ end
+ end
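Since big_ml.rb extends the BigML module with Util::Config (see the big_ml.rb diff below), these module methods are available directly on BigML. A small sketch of the configure/options/reset cycle:

```ruby
BigML.configure do |c|
  c.username = 'foo'
  c.api_key  = 'bar'
end

BigML.options  # => { :username => "foo", :api_key => "bar" }
BigML.reset    # back to the nil defaults
```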
data/lib/big_ml/{request.rb → util/request.rb} RENAMED
File without changes
data/lib/big_ml/version.rb CHANGED
@@ -1,3 +1,3 @@
  module BigML
- VERSION = "0.1.0"
+ VERSION = "0.1.1"
  end
data/lib/big_ml.rb CHANGED
@@ -1,9 +1,13 @@
- require 'big_ml/version'
- require 'big_ml/client'
- require 'big_ml/config'
+ require 'big_ml/util/client'
+ require 'big_ml/model'
+ require 'big_ml/dataset'
+ require 'big_ml/prediction'
+ require 'big_ml/source'
+
+ require 'big_ml/util/config'
 
  module BigML
- extend Config
+ extend Util::Config
 
  class << self
  def new(options = {})
data/spec/integration/dataset_spec.rb CHANGED
@@ -42,10 +42,27 @@ describe BigML::Dataset, :vcr do
  BigML::Dataset.find(@dataset.id).name.should == 'foo name'
  end
 
+ it "must be able to update the name from the instance" do
+ @dataset.update(:name => 'foo name').code.should == 202
+ BigML::Dataset.find(@dataset.id).name.should == 'foo name'
+ end
+
+ it "must be able to be deleted using the destroy method" do
+ dataset_id = @dataset.id
+ @dataset.destroy
+ BigML::Dataset.find(dataset_id).should be_nil
+ end
+
  it "must be able to remove the dataset" do
  BigML::Dataset.delete(@dataset.id)
  BigML::Dataset.find(@dataset.id).should be_nil
  BigML::Dataset.all.should have(0).datasets
  end
+
+ it "can be converted in a model" do
+ model = @dataset.to_model
+ model.instance_of?(BigML::Model).should be_true
+ model.code.should == 201
+ end
  end
  end
data/spec/integration/model_spec.rb CHANGED
@@ -44,10 +44,27 @@ describe BigML::Model, :vcr do
  BigML::Model.find(@model.id).name.should == 'foo name'
  end
 
+ it "must be able to update the name from the instance" do
+ @model.update( :name => 'foo name' ).code.should == 202
+ BigML::Model.find(@model.id).name.should == 'foo name'
+ end
+
  it "must be able to remove the model" do
  BigML::Model.delete(@model.id)
  BigML::Model.find(@model.id).should be_nil
  BigML::Model.all.should have(0).models
  end
+
+ it "must be able to be deleted using the destroy method" do
+ model_id = @model.id
+ @model.destroy
+ BigML::Model.find(model_id).should be_nil
+ end
+
+ it "can be converted in a prediction" do
+ prediction = @model.to_prediction(:input_data => { "000001" => 3 })
+ prediction.instance_of?(BigML::Prediction).should be_true
+ prediction.code.should == 201
+ end
  end
  end
data/spec/integration/prediction_spec.rb CHANGED
@@ -46,10 +46,21 @@ describe BigML::Prediction, :vcr do
  BigML::Prediction.find(@prediction.id).name.should == 'foo name'
  end
 
+ it "must be able to update the name from the instance" do
+ @prediction.update( :name => 'foo name' ).code.should == 202
+ BigML::Prediction.find(@prediction.id).name.should == 'foo name'
+ end
+
  it "must be able to remove the prediction" do
  BigML::Prediction.delete(@prediction.id)
  BigML::Prediction.find(@prediction.id).should be_nil
  BigML::Prediction.all.should have(0).predictions
  end
+
+ it "must be able to be deleted using the destroy method" do
+ prediction_id = @prediction.id
+ @prediction.destroy
+ BigML::Prediction.find(prediction_id).should be_nil
+ end
  end
  end
@@ -3,12 +3,7 @@ require "spec_helper"
3
3
  describe BigML::Source, :vcr do
4
4
 
5
5
  before(:each) do
6
- BigML::Source.all.each do |s|
7
- BigML::Source.delete(s.id)
8
- end
9
- BigML::Dataset.all.each do |s|
10
- BigML::Dataset.delete(s.id)
11
- end
6
+ BigML::Source.delete_all
12
7
  end
13
8
 
14
9
  describe "no source" do
@@ -46,10 +41,28 @@ describe BigML::Source, :vcr do
46
41
  BigML::Source.find(@source.id).name.should == 'new name'
47
42
  end
48
43
 
44
+ it "must be able to update the name from the instance" do
45
+ BigML::Source.find(@source.id).name.should == 'iris.csv'
46
+ @source.update( :name => 'new name' ).code.should == 202
47
+ BigML::Source.find(@source.id).name.should == 'new name'
48
+ end
49
+
49
50
  it "must be able to remove the source" do
50
51
  BigML::Source.delete(@source.id)
51
52
  BigML::Source.find(@source.id).should be_nil
52
53
  BigML::Source.all.should have(0).sources
53
54
  end
55
+
56
+ it "must be able to be deleted using the destroy method" do
57
+ source_id = @source.id
58
+ @source.destroy
59
+ BigML::Source.find(source_id).should be_nil
60
+ end
61
+
62
+ it "can be converted in a dataset" do
63
+ dataset = @source.to_dataset
64
+ dataset.instance_of?(BigML::Dataset).should be_true
65
+ dataset.code.should == 201
66
+ end
54
67
  end
55
68
  end
data/spec/units/client_spec.rb CHANGED
@@ -1,8 +1,8 @@
  require 'spec_helper'
 
- describe BigML::Client do
+ describe BigML::Util::Client do
  let(:keys) {
- BigML::Config::VALID_OPTIONS_KEYS
+ BigML::Util::Config::VALID_OPTIONS_KEYS
  }
 
  context "module configuration" do
@@ -13,7 +13,7 @@ describe BigML::Client do
  }
 
  it "should inherit module configuration" do
- api = BigML::Client.new
+ api = BigML::Util::Client.new
  keys.each { |key| api.send(key).should == key }
  end
  end
@@ -28,7 +28,7 @@ describe BigML::Client do
 
  context "during initialization" do
  it "should override module configuration" do
- api = BigML::Client.new(credentials)
+ api = BigML::Util::Client.new(credentials)
  keys.each { |key| api.send(key).should == credentials[key] }
  end
  end
@@ -41,7 +41,7 @@ describe BigML::Client do
  }
 
  it "should override module configuration" do
- api = BigML::Client.new
+ api = BigML::Util::Client.new
  credentials.each { |key, value| api.send("#{key}=", value) }
  keys.each { |key| api.send(key).should == credentials[key] }
  end