big_ml 0.1.2 → 0.1.3

Files changed (52)
  1. checksums.yaml +7 -0
  2. data/.rspec +1 -1
  3. data/.rvmrc.example +1 -1
  4. data/.travis.yml +1 -1
  5. data/README.md +6 -5
  6. data/lib/big_ml.rb +5 -0
  7. data/lib/big_ml/base.rb +31 -0
  8. data/lib/big_ml/batch_prediction.rb +39 -0
  9. data/lib/big_ml/ensemble.rb +32 -0
  10. data/lib/big_ml/evaluation.rb +31 -0
  11. data/lib/big_ml/prediction.rb +2 -2
  12. data/lib/big_ml/util/client.rb +1 -0
  13. data/lib/big_ml/util/config.rb +4 -1
  14. data/lib/big_ml/util/request.rb +9 -4
  15. data/lib/big_ml/version.rb +1 -1
  16. data/spec/integration/dataset_spec.rb +21 -23
  17. data/spec/integration/ensemble_spec.rb +73 -0
  18. data/spec/integration/evaluation_spec.rb +64 -0
  19. data/spec/integration/model_spec.rb +23 -25
  20. data/spec/integration/prediction_spec.rb +20 -22
  21. data/spec/integration/source_spec.rb +22 -24
  22. data/spec/spec_helper.rb +4 -3
  23. data/spec/units/client_spec.rb +58 -26
  24. data/spec/units/source_spec.rb +3 -3
  25. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/can_be_converted_in_a_model.yml +180 -99
  26. data/spec/vcr_cassettes/BigML_Dataset/one_dataset/must_be_able_to_be_find_using_the_reference.yml +144 -163
  27. data/spec/vcr_cassettes/BigML_Ensemble/no_ensemble/_all/must_be_empty.yml +223 -0
  28. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/can_be_converted_in_a_prediction.yml +1074 -0
  29. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/must_be_able_to_be_deleted_using_the_destroy_method.yml +1082 -0
  30. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/must_be_able_to_be_find_using_the_reference.yml +734 -0
  31. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/must_be_able_to_remove_the_ensemble.yml +1215 -0
  32. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/must_be_able_to_set_number_of_models.yml +853 -0
  33. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/must_be_able_to_update_the_name.yml +1226 -0
  34. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/must_be_able_to_update_the_name_from_the_instance.yml +1226 -0
  35. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/must_have_only_one_item.yml +686 -0
  36. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/must_have_the_same_size.yml +732 -0
  37. data/spec/vcr_cassettes/BigML_Ensemble/one_ensemble/was_created_successfully.yml +495 -0
  38. data/spec/vcr_cassettes/BigML_Evaluation/no_evaluation/_all/must_be_empty.yml +600 -0
  39. data/spec/vcr_cassettes/BigML_Evaluation/one_evaluation/must_be_able_to_be_deleted_using_the_destroy_method.yml +1127 -0
  40. data/spec/vcr_cassettes/BigML_Evaluation/one_evaluation/must_be_able_to_be_find_using_the_reference.yml +1151 -0
  41. data/spec/vcr_cassettes/BigML_Evaluation/one_evaluation/must_be_able_to_remove_the_evaluation.yml +1203 -0
  42. data/spec/vcr_cassettes/BigML_Evaluation/one_evaluation/must_be_able_to_update_the_name.yml +1374 -0
  43. data/spec/vcr_cassettes/BigML_Evaluation/one_evaluation/must_be_able_to_update_the_name_from_the_instance.yml +1373 -0
  44. data/spec/vcr_cassettes/BigML_Evaluation/one_evaluation/must_have_only_one_item.yml +1103 -0
  45. data/spec/vcr_cassettes/BigML_Evaluation/one_evaluation/must_have_the_same_name.yml +1108 -0
  46. data/spec/vcr_cassettes/BigML_Evaluation/one_evaluation/was_created_successfully.yml +922 -0
  47. data/spec/vcr_cassettes/BigML_Model/one_model/must_be_able_to_be_find_using_the_reference.yml +269 -282
  48. data/spec/vcr_cassettes/BigML_Prediction/one_prediction/must_be_able_to_be_find_using_the_reference.yml +360 -312
  49. data/spec/vcr_cassettes/BigML_Source/one_source/must_be_able_to_be_find_using_the_reference.yml +75 -72
  50. data/spec/vcr_cassettes/BigML_Util_Client/response_handling/debug_mode/raises_on_bad_request.yml +38 -0
  51. data/spec/vcr_cassettes/BigML_Util_Client/response_handling/normal_mode/does_not_raise_on_bad_request.yml +38 -0
  52. metadata +74 -43
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+ metadata.gz: a50eda2eba99d6592ba297ee52e9d83b50542e91
+ data.tar.gz: 81deabae37c7a0216e89f51a548d98340895cc1d
+ SHA512:
+ metadata.gz: 48b9bc705f8ae24e698e812e4568a9b3f655d55d15f70293f9bfbbf6750473dceaca8c2f1f3d8e978d1fe386bf015c51b69f674fd380122ce9e758131326af1c
+ data.tar.gz: ca6dc1b896895752e275e108ee6d73aa35f23b5dbad4ff3b37733315b9eecae0cc27b22fe4f7639c4a5f5ff9e2b7d05ae2eda4a796419059c44a6bdcab543a05
data/.rspec CHANGED
@@ -1,2 +1,2 @@
- --format nested
+ --format documentation
  --color
data/.rvmrc.example CHANGED
@@ -1 +1 @@
- rvm use --create ruby-1.9.3-p194@bigml > /dev/null
+ rvm use --create ruby-2.1.0@bigml > /dev/null
data/.travis.yml CHANGED
@@ -1,4 +1,4 @@
  language: ruby
  rvm:
- - 1.9.2
  - 1.9.3
+ - 2.1.0
data/README.md CHANGED
@@ -1,6 +1,6 @@
  # BigML
 
- A Ruby wrapper for the [BigML REST API](https://bigml.com/developers), provides access to sources, datasets, models, and predictions. You can create, retrieve, update and delete
+ A Ruby wrapper for the [BigML REST API](https://bigml.com/developers), provides access to sources, datasets, models, and predictions. You can create, retrieve, update and delete
 
  [![Build Status](https://secure.travis-ci.org/vigosan/big_ml.png?branch=master)](http://travis-ci.org/vigosan/big_ml)
 
@@ -14,6 +14,7 @@ BigML.configure do |c|
  c.username = 'foo'
  c.api_key = 'bar'
  c.dev_mode = true
+ #c.debug = true # Raises errors for bad requests
  end
 
  # easy creation of sources
@@ -26,7 +27,7 @@ all_datasets = BigML::Dataset.all
  # or just want to create one?
  dataset = source.to_dataset
 
- # find a concrete one
+ # find a concrete one
  model = BigML::Model.find("4fe8868a035d07682f002891")
 
  # create you predictions using parameters...
@@ -66,10 +67,10 @@ Install the gem with rubygem in your system:
 
  This library has been tested on the following ruby interpreters:
 
- * MRI 1.9.2
- * MRI 1.9.3
+ * 1.9.3
+ * 2.1.0
 
- ## Running the tests
+ ## Running the tests
 
  Download and run the test suit:
 
data/lib/big_ml.rb CHANGED
@@ -2,13 +2,18 @@ require 'big_ml/util/client'
  require 'big_ml/model'
  require 'big_ml/dataset'
  require 'big_ml/prediction'
+ require 'big_ml/batch_prediction'
  require 'big_ml/source'
+ require 'big_ml/evaluation'
+ require 'big_ml/ensemble'
 
  require 'big_ml/util/config'
 
  module BigML
  extend Util::Config
 
+ UnsuccessfulRequestError = Class.new(StandardError)
+
  class << self
  def new(options = {})
  Client.new(options)
data/lib/big_ml/base.rb CHANGED
@@ -1,4 +1,5 @@
  require 'big_ml/util/client'
+ require 'timeout'
 
  module BigML
  class Base
@@ -23,6 +24,22 @@ module BigML
  self.class.update(id, options)
  end
 
+ def ready?
+ success? || error?
+ end
+
+ def success?
+ status["code"] == 5
+ end
+
+ def error?
+ status["code"] < 0
+ end
+
+ def wait_for_ready(options = {})
+ BigML::Base.wait_for_ready(self, options)
+ end
+
  class << self
  def all(options = {})
  response = client.get("/#{resource_name}", options)
@@ -47,6 +64,20 @@ module BigML
  all.each {|s| delete(s.id) }
  end
 
+ # Keeps reloading resource until it is ready
+ def wait_for_ready(resource, options = {})
+ klass = resource.class
+ remaining = options.fetch :timeout, 60
+ delay = options.fetch :delay, 1
+ until resource && resource.ready?
+ resource = klass.find(resource.id)
+ sleep(delay) unless resource.ready?
+ remaining = remaining - delay
+ raise Timeout::Error if remaining <= 0
+ end
+ resource
+ end
+
  private
 
  def client
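A minimal usage sketch for the new readiness helpers; the timeout and delay values are illustrative, and the CSV path is the one used in the specs:

    # Resources are built asynchronously; block until BigML reports a final
    # status (success? => code 5, error? => negative code) or give up.
    source = BigML::Source.create("spec/fixtures/iris.csv")
    source = source.wait_for_ready(timeout: 120, delay: 2) # raises Timeout::Error on expiry
    dataset = BigML::Dataset.create(source.resource) if source.success?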
data/lib/big_ml/batch_prediction.rb ADDED
@@ -0,0 +1,39 @@
+ require 'big_ml/base'
+
+ module BigML
+ class BatchPrediction < Base
+ BATCH_PREDICTION_PROPERTIES = [
+ :category, :code, :created, :credits, :dataset, :dataset_status,
+ :description, :fields, :dataset, :model, :model_status, :name,
+ :objective_fields, :prediction, :prediction_path, :private, :resource,
+ :source, :source_status, :status, :tags, :updated
+ ]
+
+ attr_reader *BATCH_PREDICTION_PROPERTIES
+
+ class << self
+ def create(model_or_ensemble, dataset, options = {})
+ arguments = { dataset: dataset }
+ if model_or_ensemble.start_with? 'model'
+ arguments[:model] = model_or_ensemble
+ elsif model_or_ensemble.start_with? 'ensemble'
+ arguments[:ensemble] = model_or_ensemble
+ else
+ raise ArgumentError, "Expected model or ensemble, got #{model_or_ensemble}"
+ end
+ response = client.post("/#{resource_name}", {}, arguments.merge(options))
+ self.new(response) if response.success?
+ end
+
+ def download(id)
+ response = client.get("/#{resource_name}/#{id}/download")
+ response.body if response.success?
+ end
+ end
+
+ def download
+ self.class.download(id)
+ end
+
+ end
+ end
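BatchPrediction is new in this release and has no integration spec in this change set, so the following is only a sketch; the resource ids are placeholders:

    # The first argument must start with "model" or "ensemble",
    # otherwise create raises ArgumentError.
    batch = BigML::BatchPrediction.create("model/4fe8868a035d07682f002891",
                                          "dataset/4fe8868a035d07682f002892")
    batch.wait_for_ready
    csv = batch.download # GET "/#{resource_name}/<id>/download", returns the response body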
data/lib/big_ml/ensemble.rb ADDED
@@ -0,0 +1,32 @@
+ require 'big_ml/base'
+
+ module BigML
+ class Ensemble < Base
+ ENSEMBLE_PROPERTIES = [
+ :category, :code, :columns, :created, :credits,
+ :dataset, :dataset_status, :description, :dev, :distributions,
+ :holdout, :error_models, :excluded_fields, :finished_models,
+ :input_fields, :locale, :max_columns, :max_rows, :model_order,
+ :models, :name, :node_threshold, :number_of_batchpredictions,
+ :number_of_evaluations, :number_of_models, :number_of_predictions,
+ :number_of_public_predictions, :objective_field, :ordering,
+ :out_of_bag, :price, :private, :randomize, :random_candidates,
+ :range, :replacement, :resource, :rows, :sample_rate, :seed,
+ :shared, :shared_hash, :sharing_key, :size, :source, :source_status,
+ :status, :subscription, :tags, :tlp, :updated
+ ]
+
+ attr_reader *ENSEMBLE_PROPERTIES
+
+ def to_prediction(options)
+ Prediction.create(resource, options)
+ end
+
+ class << self
+ def create(dataset, options = {})
+ response = client.post("/#{resource_name}", {}, options.merge(dataset: dataset))
+ self.new(response) if response.success?
+ end
+ end
+ end
+ end
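The ensemble workflow mirrors the one exercised in the new integration spec; the input field id and value are illustrative:

    source   = BigML::Source.create("spec/fixtures/iris.csv")
    dataset  = BigML::Dataset.create(source.wait_for_ready.resource)
    ensemble = BigML::Ensemble.create(dataset.wait_for_ready.resource, number_of_models: 2)
    # to_prediction delegates to Prediction.create with the ensemble's resource id
    prediction = ensemble.wait_for_ready.to_prediction(input_data: { "000001" => 3 })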
data/lib/big_ml/evaluation.rb ADDED
@@ -0,0 +1,31 @@
+ require 'big_ml/base'
+
+ module BigML
+ class Evaluation < Base
+ EVALUATION_PROPERTIES = [
+ :category, :code, :combiner, :created, :credits, :dataset, :dataset_status,
+ :description, :dev, :ensemble, :fields_map, :locale, :max_rows, :missing_strategy,
+ :model, :model_status, :model_type, :name, :number_of_models, :ordering,
+ :out_of_bag, :private, :range, :replacement, :resource, :result, :rows, :sample_rate,
+ :shared, :shared_hash, :sharing_key, :size, :status, :subscription, :tags,
+ :threshold, :type, :updated
+ ]
+
+ attr_reader *EVALUATION_PROPERTIES
+
+ class << self
+ def create(model_or_ensemble, dataset, options = {})
+ arguments = { dataset: dataset }
+ if model_or_ensemble.start_with? 'model'
+ arguments[:model] = model_or_ensemble
+ elsif model_or_ensemble.start_with? 'ensemble'
+ arguments[:ensemble] = model_or_ensemble
+ else
+ raise ArgumentError, "Expected model or ensemble, got #{model_or_ensemble}"
+ end
+ response = client.post("/#{resource_name}", {}, arguments.merge(options))
+ self.new(response) if response.success?
+ end
+ end
+ end
+ end
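A sketch of evaluating a model (or ensemble) against a dataset with the new Evaluation resource; the resource ids are placeholders:

    evaluation = BigML::Evaluation.create("model/4fe8868a035d07682f002891",
                                          "dataset/4fe8868a035d07682f002892")
    evaluation = evaluation.wait_for_ready
    evaluation.result # evaluation measures as returned by BigML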
data/lib/big_ml/prediction.rb CHANGED
@@ -12,8 +12,8 @@ module BigML
  attr_reader *PREDICTION_PROPERTIES
 
  class << self
- def create(model, options = {})
- response = client.post("/#{resource_name}", {}, { :model => model }.merge!(options))
+ def create(model_or_ensemble, options = {})
+ response = client.post("/#{resource_name}", {}, { model_or_ensemble.split('/').first.to_sym => model_or_ensemble }.merge!(options))
  self.new(response) if response.success?
  end
  end
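Prediction.create now keys the request off the resource prefix, so both forms work; the ids are placeholders:

    # "model/..."    is posted as { :model => "model/..." }
    # "ensemble/..." is posted as { :ensemble => "ensemble/..." }
    BigML::Prediction.create("model/4fe8868a035d07682f002891", input_data: { "000001" => 3 })
    BigML::Prediction.create("ensemble/4fe8868a035d07682f002893", input_data: { "000001" => 3 })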
data/lib/big_ml/util/client.rb CHANGED
@@ -12,6 +12,7 @@ module BigML
  format :json
 
  attr_accessor *Config::VALID_OPTIONS_KEYS
+ alias_method :debug?, :debug
 
  def initialize(attrs={})
  attrs = BigML.options.merge(attrs)
data/lib/big_ml/util/config.rb CHANGED
@@ -11,8 +11,10 @@ module BigML
  DEFAULT_BIGML_API_KEY = nil
  # Set default mode to production
  DEFAULT_IN_DEV_MODE = false
+ # Set default debug mode to off
+ DEFAULT_IN_DEBUG_MODE = false
  # An array of valid keys in the options hash when configuring a {BigML::Client}
- VALID_OPTIONS_KEYS = [:username, :api_key, :dev_mode]
+ VALID_OPTIONS_KEYS = [:username, :api_key, :dev_mode, :debug]
  attr_accessor *VALID_OPTIONS_KEYS
 
 
@@ -31,6 +33,7 @@ module BigML
  self.username = DEFAULT_BIGML_USERNAME
  self.api_key = DEFAULT_BIGML_API_KEY
  self.dev_mode = DEFAULT_IN_DEV_MODE
+ self.debug = DEFAULT_IN_DEBUG_MODE
  end
  end
  end
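Turning on the new option from the configure block matches the README snippet above; the credentials are placeholders:

    BigML.configure do |c|
      c.username = 'foo'
      c.api_key  = 'bar'
      c.dev_mode = true
      c.debug    = true # defaults to DEFAULT_IN_DEBUG_MODE (false)
    end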
data/lib/big_ml/util/request.rb CHANGED
@@ -1,19 +1,19 @@
  module BigML
  module Request
  def get(path, options = {}, body = {})
- self.class.get(path, prepare_options(options, body))
+ handle_response self.class.get(path, prepare_options(options, body))
  end
 
  def put(path, options = {}, body = {})
- self.class.put(path, prepare_options(options, body))
+ handle_response self.class.put(path, prepare_options(options, body))
  end
 
  def post(path, options = {}, body = {})
- self.class.post(path, prepare_options(options, body))
+ handle_response self.class.post(path, prepare_options(options, body))
  end
 
  def delete(path, options = {}, body = {})
- self.class.delete(path, prepare_options(options, body))
+ handle_response self.class.delete(path, prepare_options(options, body))
  end
 
  private
@@ -25,5 +25,10 @@ module BigML
  { :headers => {'content-type' => 'application/json'}, :query => options, :body => body.to_json }
  end
  end
+
+ def handle_response(response)
+ debug? && !response.success? and raise UnsuccessfulRequestError, response.inspect
+ response
+ end
  end
  end
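With debug enabled, every HTTP verb now funnels through handle_response, so a failed call surfaces as an exception instead of silently returning nil from create/find. A sketch of guarding against it; the source id is a placeholder:

    begin
      BigML::Dataset.create("source/000000000000000000000000") # nonexistent source
    rescue BigML::UnsuccessfulRequestError => e
      # In debug mode a non-successful response is raised with response.inspect as the message
      warn e.message
    end

Without c.debug set, the same bad request is not raised; create simply returns nil because it only wraps successful responses.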
data/lib/big_ml/version.rb CHANGED
@@ -1,3 +1,3 @@
  module BigML
- VERSION = "0.1.2"
+ VERSION = "0.1.3"
  end
@@ -2,7 +2,7 @@ require "spec_helper"
2
2
 
3
3
  describe BigML::Dataset, :vcr do
4
4
 
5
- before(:each) do
5
+ before do
6
6
  BigML::Source.delete_all
7
7
  BigML::Dataset.delete_all
8
8
  end
@@ -10,59 +10,57 @@ describe BigML::Dataset, :vcr do
10
10
  describe "no dataset" do
11
11
  describe ".all" do
12
12
  it "must be empty" do
13
- BigML::Dataset.all.should == []
13
+ expect(BigML::Dataset.all).to eq([])
14
14
  end
15
15
  end
16
16
  end
17
17
 
18
18
  describe "one dataset" do
19
- before do
20
- @source = BigML::Source.create("spec/fixtures/iris.csv")
21
- @dataset = BigML::Dataset.create(@source.resource)
22
- end
19
+ let(:source) { BigML::Source.create "spec/fixtures/iris.csv" }
20
+ let(:dataset) { BigML::Dataset.create source.resource }
23
21
 
24
22
  it "was created successfully" do
25
- @dataset.code.should == 201
23
+ expect(dataset.code).to eq(201)
26
24
  end
27
25
 
28
26
  it "must have only one item" do
29
- BigML::Dataset.all.should have(1).datasets
27
+ expect(BigML::Dataset.all.length).to eq(1)
30
28
  end
31
29
 
32
30
  it "must have the same file_name" do
33
- BigML::Dataset.all.first.size.should == 4608
31
+ expect(BigML::Dataset.all.first.size).to eq(4608)
34
32
  end
35
33
 
36
34
  it "must be able to be find using the reference" do
37
- BigML::Dataset.find(@dataset.id) == @dataset
35
+ expect(BigML::Dataset.find(dataset.id).id).to eq(dataset.id)
38
36
  end
39
37
 
40
38
  it "must be able to update the name" do
41
- BigML::Dataset.update(@dataset.id, { :name => 'foo name' }).code.should == 202
42
- BigML::Dataset.find(@dataset.id).name.should == 'foo name'
39
+ expect(BigML::Dataset.update(dataset.id, { name: 'foo name' }).code).to eq(202)
40
+ expect(BigML::Dataset.find(dataset.id).name).to eq('foo name')
43
41
  end
44
42
 
45
43
  it "must be able to update the name from the instance" do
46
- @dataset.update(:name => 'foo name').code.should == 202
47
- BigML::Dataset.find(@dataset.id).name.should == 'foo name'
44
+ expect(dataset.update(name: 'foo name').code).to eq(202)
45
+ expect(BigML::Dataset.find(dataset.id).name).to eq('foo name')
48
46
  end
49
47
 
50
48
  it "must be able to be deleted using the destroy method" do
51
- dataset_id = @dataset.id
52
- @dataset.destroy
53
- BigML::Dataset.find(dataset_id).should be_nil
49
+ dataset_id = dataset.id
50
+ dataset.destroy
51
+ expect(BigML::Dataset.find(dataset_id)).to be_nil
54
52
  end
55
53
 
56
54
  it "must be able to remove the dataset" do
57
- BigML::Dataset.delete(@dataset.id)
58
- BigML::Dataset.find(@dataset.id).should be_nil
59
- BigML::Dataset.all.should have(0).datasets
55
+ BigML::Dataset.delete(dataset.id)
56
+ expect(BigML::Dataset.find(dataset.id)).to be_nil
57
+ expect(BigML::Dataset.all.length).to eq(0)
60
58
  end
61
59
 
62
60
  it "can be converted in a model" do
63
- model = @dataset.to_model
64
- model.instance_of?(BigML::Model).should be_true
65
- model.code.should == 201
61
+ model = dataset.to_model
62
+ expect(model).to be_instance_of(BigML::Model)
63
+ expect(model.code).to eq(201)
66
64
  end
67
65
  end
68
66
  end
data/spec/integration/ensemble_spec.rb ADDED
@@ -0,0 +1,73 @@
+ require "spec_helper"
+
+ describe BigML::Ensemble, :vcr do
+
+ before do
+ BigML::Source.delete_all
+ BigML::Dataset.delete_all
+ BigML::Ensemble.delete_all
+ end
+
+ describe "no ensemble" do
+ describe ".all" do
+ it "must be empty" do
+ expect(BigML::Ensemble.all).to eq([])
+ end
+ end
+ end
+
+ describe "one ensemble" do
+ let(:source) { BigML::Source.create 'spec/fixtures/iris.csv' }
+ let(:dataset) { BigML::Dataset.create source.wait_for_ready.resource }
+ let!(:ensemble) { BigML::Ensemble.create dataset.wait_for_ready.resource, number_of_models: 2 }
+
+ it "was created successfully" do
+ expect(ensemble.code).to eq(201)
+ end
+
+ it "must have only one item" do
+ expect(BigML::Ensemble.all.length).to eq(1)
+ end
+
+ it "must have the same size" do
+ expect(BigML::Ensemble.all.first.size).to eq(9216)
+ end
+
+ it "must be able to set number of models" do
+ BigML::Ensemble.create dataset.wait_for_ready.resource, number_of_models: 3
+ expect(BigML::Ensemble.all.first.number_of_models).to eq(3)
+ end
+
+ it "must be able to be find using the reference" do
+ expect(BigML::Ensemble.find(ensemble.id).id).to eq(ensemble.id)
+ end
+
+ it "must be able to update the name" do
+ expect(BigML::Ensemble.update(ensemble.wait_for_ready.id, { name: 'foo name' }).code).to eq(202)
+ expect(BigML::Ensemble.find(ensemble.id).name).to eq('foo name')
+ end
+
+ it "must be able to update the name from the instance" do
+ expect(ensemble.wait_for_ready.update(name: 'foo name').code).to eq(202)
+ expect(BigML::Ensemble.find(ensemble.id).name).to eq('foo name')
+ end
+
+ it "must be able to remove the ensemble" do
+ BigML::Ensemble.delete ensemble.wait_for_ready.id
+ expect(BigML::Ensemble.find ensemble.id).to be_nil
+ expect(BigML::Ensemble.all.length).to eq(0)
+ end
+
+ it "must be able to be deleted using the destroy method" do
+ ensemble_id = ensemble.id
+ ensemble.wait_for_ready.destroy
+ expect(BigML::Ensemble.find ensemble_id).to be_nil
+ end
+
+ it "can be converted in a prediction" do
+ prediction = ensemble.wait_for_ready.to_prediction(input_data: { "000001" => 3 })
+ expect(ensemble).to be_instance_of(BigML::Ensemble)
+ expect(ensemble.code).to eq(201)
+ end
+ end
+ end