spark_api 1.4.34 → 1.5.0

Files changed (62)
  1. checksums.yaml +4 -4
  2. data/README.md +1 -1
  3. data/VERSION +1 -1
  4. data/lib/spark_api/authentication/api_auth.rb +1 -1
  5. data/lib/spark_api/authentication/oauth2.rb +1 -1
  6. data/lib/spark_api/authentication/oauth2_impl/grant_type_base.rb +1 -1
  7. data/lib/spark_api/client.rb +2 -2
  8. data/lib/spark_api/request.rb +1 -1
  9. data/spec/spec_helper.rb +9 -4
  10. data/spec/unit/spark_api/authentication/api_auth_spec.rb +21 -22
  11. data/spec/unit/spark_api/authentication/base_auth_spec.rb +3 -3
  12. data/spec/unit/spark_api/authentication/oauth2_impl/faraday_middleware_spec.rb +1 -1
  13. data/spec/unit/spark_api/authentication/oauth2_impl/grant_type_base_spec.rb +1 -1
  14. data/spec/unit/spark_api/authentication/oauth2_impl/single_session_provider_spec.rb +2 -2
  15. data/spec/unit/spark_api/authentication/oauth2_spec.rb +40 -40
  16. data/spec/unit/spark_api/authentication_spec.rb +2 -2
  17. data/spec/unit/spark_api/configuration/yaml_spec.rb +44 -44
  18. data/spec/unit/spark_api/configuration_spec.rb +56 -57
  19. data/spec/unit/spark_api/faraday_middleware_spec.rb +12 -12
  20. data/spec/unit/spark_api/models/account_spec.rb +20 -20
  21. data/spec/unit/spark_api/models/activity_spec.rb +5 -5
  22. data/spec/unit/spark_api/models/base_spec.rb +32 -32
  23. data/spec/unit/spark_api/models/concerns/destroyable_spec.rb +2 -2
  24. data/spec/unit/spark_api/models/concerns/savable_spec.rb +19 -19
  25. data/spec/unit/spark_api/models/connect_prefs_spec.rb +1 -1
  26. data/spec/unit/spark_api/models/constraint_spec.rb +1 -1
  27. data/spec/unit/spark_api/models/contact_spec.rb +50 -50
  28. data/spec/unit/spark_api/models/dirty_spec.rb +12 -12
  29. data/spec/unit/spark_api/models/document_spec.rb +3 -3
  30. data/spec/unit/spark_api/models/fields_spec.rb +17 -17
  31. data/spec/unit/spark_api/models/finders_spec.rb +7 -7
  32. data/spec/unit/spark_api/models/floplan_spec.rb +4 -4
  33. data/spec/unit/spark_api/models/listing_cart_spec.rb +46 -46
  34. data/spec/unit/spark_api/models/listing_meta_translations_spec.rb +6 -6
  35. data/spec/unit/spark_api/models/listing_spec.rb +91 -91
  36. data/spec/unit/spark_api/models/message_spec.rb +10 -10
  37. data/spec/unit/spark_api/models/note_spec.rb +10 -10
  38. data/spec/unit/spark_api/models/notification_spec.rb +6 -6
  39. data/spec/unit/spark_api/models/open_house_spec.rb +4 -4
  40. data/spec/unit/spark_api/models/photo_spec.rb +8 -8
  41. data/spec/unit/spark_api/models/portal_spec.rb +4 -4
  42. data/spec/unit/spark_api/models/property_types_spec.rb +5 -5
  43. data/spec/unit/spark_api/models/rental_calendar_spec.rb +13 -11
  44. data/spec/unit/spark_api/models/rule_spec.rb +2 -2
  45. data/spec/unit/spark_api/models/saved_search_spec.rb +33 -33
  46. data/spec/unit/spark_api/models/search_template/quick_search_spec.rb +5 -5
  47. data/spec/unit/spark_api/models/shared_listing_spec.rb +12 -12
  48. data/spec/unit/spark_api/models/sort_spec.rb +3 -3
  49. data/spec/unit/spark_api/models/standard_fields_spec.rb +12 -12
  50. data/spec/unit/spark_api/models/subresource_spec.rb +18 -18
  51. data/spec/unit/spark_api/models/system_info_spec.rb +7 -7
  52. data/spec/unit/spark_api/models/tour_of_home_spec.rb +3 -3
  53. data/spec/unit/spark_api/models/video_spec.rb +9 -9
  54. data/spec/unit/spark_api/models/virtual_tour_spec.rb +7 -7
  55. data/spec/unit/spark_api/models/vow_account_spec.rb +8 -8
  56. data/spec/unit/spark_api/multi_client_spec.rb +14 -14
  57. data/spec/unit/spark_api/options_hash_spec.rb +4 -4
  58. data/spec/unit/spark_api/paginate_spec.rb +71 -71
  59. data/spec/unit/spark_api/primary_array_spec.rb +5 -5
  60. data/spec/unit/spark_api/request_spec.rb +60 -60
  61. data/spec/unit/spark_api_spec.rb +6 -6
  62. metadata +162 -233
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: d372159752425b341adabf6fe1bf0361d28e4c1674f09f553b9e911f6294ac24
- data.tar.gz: cad48a5f4871cb19982be6ba6fd01163957aaf4b552156066e7ed5ebe5581538
+ metadata.gz: 17eaaa74af21a3a858941c286e4731ae9d67d1b315f06f5a3409d78d7d4fa89e
+ data.tar.gz: 51373eaa638eefed5245d88d78cc0e1374d75cdfe385c3c358d4d40348059514
  SHA512:
- metadata.gz: ea1807cfe81a706d50eb70a59a9e831720d69c46318a414dbf1e2c0bc0e75d31f289d6686a21644224efde694993bccff0d0659d0ac8bd873754c6ccf1802963
- data.tar.gz: 8a6a226f3883d1f2bc9cb60bdaddf328e2e027f7d26452c0a60299ddd2ac99045cb19d8b1e55c00a35b15d418361d28f2aecadd750508ab68612f3c0e435fb73
+ metadata.gz: 7d81cd63e089a1182557cdeb95ab1e52d526904879294f66e22026c68d3dd4dd5ec8408763fd9d62ac72a9fcf23964ee9eec3d5e6e104a214e33c5a46ece21f7
+ data.tar.gz: c44265855abe3384dea240410e96230b68fca9f9ac3cef702feb1ff0e05d026d571833591d5fa4865c21f04dc9f48d7f2987ffc9895088ac5c4431d73fa9b3c0
data/README.md CHANGED
@@ -1,6 +1,6 @@
  Spark API
  =====================
- [![Build Status](https://travis-ci.org/sparkapi/spark_api.png?branch=master)](http://travis-ci.org/sparkapi/spark_api) [![Code Climate](https://codeclimate.com/badge.png)](https://codeclimate.com/github/sparkapi/spark_api)
+ ![CI](https://github.com/sparkapi/spark_api/workflows/CI/badge.svg) ![Code Climate](https://codeclimate.com/badge.png)

  A Ruby wrapper for the Spark REST API. Loosely based on ActiveResource to provide models to interact with remote services.

data/VERSION CHANGED
@@ -1 +1 @@
- 1.4.34
+ 1.5.0
data/lib/spark_api/authentication/api_auth.rb CHANGED
@@ -62,7 +62,7 @@ module SparkApi

  # Perform an HTTP request (no data)
  def request(method, path, body, options)
- escaped_path = URI.escape(path)
+ escaped_path = Addressable::URI.escape(path)
  request_opts = {
  :AuthToken => @session.auth_token
  }
data/lib/spark_api/authentication/oauth2.rb CHANGED
@@ -40,7 +40,7 @@ module SparkApi

  # Perform an HTTP request (no data)
  def request(method, path, body, options={})
- escaped_path = URI.escape(path)
+ escaped_path = Addressable::URI.escape(path)
  connection = @client.connection(true) # SSL Only!
  connection.headers.merge!(self.auth_header)

data/lib/spark_api/authentication/oauth2_impl/grant_type_base.rb CHANGED
@@ -45,7 +45,7 @@ module SparkApi
  response.expires_in = provider.session_timeout if response.expires_in.nil?
  SparkApi.logger.debug { "[oauth2] New session created #{response}" }
  response
- rescue Faraday::Error::ConnectionFailed => e
+ rescue Faraday::ConnectionFailed => e
  if @client.ssl_verify && e.message =~ /certificate verify failed/
  SparkApi.logger.error { SparkApi::Errors.ssl_verification_error }
  end
data/lib/spark_api/client.rb CHANGED
@@ -2,7 +2,7 @@ module SparkApi
  # =API Client
  # Main class to setup and run requests on the API. A default client is accessible globally as
  # SparkApi::client if the global configuration has been set as well. Otherwise, this class may
- # be instanciated separately with the configuration information.
+ # be instantiated separately with the configuration information.
  class Client
  include Connection
  include Authentication
@@ -21,7 +21,7 @@ module SparkApi
  Configuration::VALID_OPTION_KEYS.each do |key|
  send("#{key}=", options[key])
  end
- # Instanciate the authenication class passed in.
+ # Instantiate the authentication class passed in.
  @authenticator = authentication_mode.send("new", self)
  end

data/lib/spark_api/request.rb CHANGED
@@ -99,7 +99,7 @@ module SparkApi
  else
  return response.body
  end
- rescue Faraday::Error::ConnectionFailed => e
+ rescue Faraday::ConnectionFailed => e
  if self.ssl_verify && e.message =~ /certificate verify failed/
  SparkApi.logger.error { SparkApi::Errors.ssl_verification_error }
  end
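Note: the rescue clauses above (grant_type_base.rb and request.rb) change because newer Faraday releases expose connection errors as Faraday::ConnectionFailed rather than the old nested Faraday::Error::ConnectionFailed constant. A minimal sketch of the new-style rescue; the URL is illustrative and not part of the gem:

    require 'faraday'

    begin
      Faraday.get("https://api.sparkapi.com/v1/system")
    rescue Faraday::ConnectionFailed => e
      warn "connection failed: #{e.message}"
    end
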
data/spec/spec_helper.rb CHANGED
@@ -11,7 +11,6 @@ end

  require "rubygems"
  require "rspec"
- require 'rspec/autorun'
  require 'webmock/rspec'
  require "json"
  require 'multi_json'
@@ -22,6 +21,13 @@ require path + '/spark_api'

  require 'spark_api'

+ if ENV['COVERAGE'] == "on"
+   require 'simplecov'
+   require 'simplecov-rcov'
+   SimpleCov.formatter = SimpleCov::Formatter::RcovFormatter
+   SimpleCov.start { add_filter %w(/vendor /spec /test) }
+ end
+
  FileUtils.mkdir 'log' unless File.exists? 'log'

  module SparkApi
@@ -48,17 +54,16 @@ end
  Dir[File.expand_path(File.join(File.dirname(__FILE__),'support','**','*.rb'))].each {|f| require f}

  RSpec.configure do |config|
-
+
  config.include WebMock::API
  config.include StubApiRequests

- config.treat_symbols_as_metadata_keys_with_true_values = true
  config.alias_example_to :on_get_it, :method => 'GET'
  config.alias_example_to :on_put_it, :method => 'PUT'
  config.alias_example_to :on_post_it, :method => 'POST'
  config.alias_example_to :on_delete_it, :method => 'DELETE'
  config.before(:all) { reset_config }
- config.color_enabled = true
+ config.color = true
  end

  def jruby?
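Note: the spec_helper changes above modernize the suite for RSpec 3 and add opt-in coverage: rspec/autorun is no longer required, treat_symbols_as_metadata_keys_with_true_values is the RSpec 3 default so the setting is dropped, color_enabled is renamed to color, and SimpleCov loads only when the COVERAGE environment variable is set. A minimal sketch of the equivalent RSpec 3 configuration and the opt-in coverage invocation (illustrative, not taken from the gem):

    # Run the suite with coverage enabled: COVERAGE=on bundle exec rspec
    RSpec.configure do |config|
      config.color = true   # RSpec 3 name for the old color_enabled setting
    end
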
data/spec/unit/spark_api/authentication/api_auth_spec.rb CHANGED
@@ -4,15 +4,14 @@ describe SparkApi::Authentication::ApiAuth do
  subject {SparkApi::Authentication::ApiAuth.new(nil) }
  describe "build_param_hash" do
  it "should return a blank string when passed nil" do
- subject.build_param_string(nil).should be_empty
+ expect(subject.build_param_string(nil)).to be_empty
  end
  it "should return a correct param string for one item" do
- subject.build_param_string({:foo => "bar"}).should match("foobar")
+ expect(subject.build_param_string({:foo => "bar"})).to match("foobar")
  end
  it "should alphabatize the param names by key first, then by value" do
- subject.build_param_string({:zoo => "zar", :ooo => "car"}).should match("ooocarzoozar")
- subject.build_param_string({:Akey => "aValue", :aNotherkey => "AnotherValue"}).should
- match "AkeyaValueaNotherkeyAnotherValue"
+ expect(subject.build_param_string({:zoo => "zar", :ooo => "car"})).to match("ooocarzoozar")
+ expect(subject.build_param_string({:Akey => "aValue", :aNotherkey => "AnotherValue"})).to match("AkeyaValueaNotherkeyAnotherValue")
  end
  end

@@ -33,7 +32,7 @@ describe SparkApi::Authentication::ApiAuth do
  stub_request(:post, "https://api.sparkapi.com/#{SparkApi.version}/session").
  with(:query => {:ApiKey => "my_key", :ApiSig => "c731cf2455fbc7a4ef937b2301108d7a"}).
  to_return(:body => fixture("authentication_failure.json"), :status=>401)
- expect {subject.authenticate()}.to raise_error(SparkApi::ClientError){ |e| e.status.should == 401 }
+ expect {subject.authenticate()}.to raise_error(SparkApi::ClientError){ |e| expect(e.status).to eq(401) }
  end
  end

@@ -41,16 +40,16 @@ describe SparkApi::Authentication::ApiAuth do
  let(:session) { Object.new }
  it "should return true when session is active" do
  subject.session = session
- session.stub(:expired?) { false }
- subject.authenticated?.should eq(true)
+ allow(session).to receive(:expired?) { false }
+ expect(subject.authenticated?).to eq(true)
  end
  it "should return false when session is expired" do
  subject.session = session
- session.stub(:expired?) { true }
- subject.authenticated?.should eq(false)
+ allow(session).to receive(:expired?) { true }
+ expect(subject.authenticated?).to eq(false)
  end
  it "should return false when session is uninitialized" do
- subject.authenticated?.should eq(false)
+ expect(subject.authenticated?).to eq(false)
  end
  end

@@ -61,14 +60,14 @@ describe SparkApi::Authentication::ApiAuth do
  it "should logout when there is an active session" do
  logged_out = false
  subject.session = session
- client.stub(:delete).with("/session/1234") { logged_out = true }
+ allow(client).to receive(:delete).with("/session/1234") { logged_out = true }
  subject.logout
- subject.session.should eq(nil)
- logged_out.should eq(true)
+ expect(subject.session).to eq(nil)
+ expect(logged_out).to eq(true)
  end
  it "should skip logging out when there is no active session information" do
- client.stub(:delete) { raise "Should not be called" }
- subject.logout.should eq(nil)
+ allow(client).to receive(:delete) { raise "Should not be called" }
+ expect(subject.logout).to eq(nil)
  end
  end

@@ -98,7 +97,7 @@ describe SparkApi::Authentication::ApiAuth do
  :AuthToken => "1234"}.merge(args)).
  to_return(:body => fixture("listings/no_subresources.json"))
  subject.session = session
- subject.request(:get, "/#{SparkApi.version}/listings", nil, args).status.should eq(200)
+ expect(subject.request(:get, "/#{SparkApi.version}/listings", nil, args).status).to eq(200)
  end
  it "should handle a post request" do
  stub_auth_request
@@ -118,14 +117,14 @@ describe SparkApi::Authentication::ApiAuth do
  }]}
  }',
  :status=>201)
- subject.request(:post, "/#{SparkApi.version}/contacts", contact, args).status.should eq(201)
+ expect(subject.request(:post, "/#{SparkApi.version}/contacts", contact, args).status).to eq(201)
  end
  end

  describe "sign" do
  it "should sign the auth parameters correctly" do
  sign_token = "my_secretApiKeymy_key"
- subject.sign(sign_token).should eq("c731cf2455fbc7a4ef937b2301108d7a")
+ expect(subject.sign(sign_token)).to eq("c731cf2455fbc7a4ef937b2301108d7a")
  end
  end

@@ -134,7 +133,7 @@ describe SparkApi::Authentication::ApiAuth do
  subject {SparkApi::Authentication::ApiAuth.new(client) }
  it "should fully sign the token" do
  parms = {:AuthToken => "1234", :ApiUser => "CoolAsIce"}
- subject.sign_token("/test", parms).should eq("7bbe3384a8b64368357f8551cab271e3")
+ expect(subject.sign_token("/test", parms)).to eq("7bbe3384a8b64368357f8551cab271e3")
  end
  end

@@ -160,8 +159,8 @@ describe SparkApi::Authentication::ApiAuth do
  to_return(:body => fixture('listings/with_documents.json'))
  l = Listing.find('1234', :_expand => "Documents")

- count.should eq(2)
- SparkApi.client.session.expired?.should eq(false)
+ expect(count).to eq(2)
+ expect(SparkApi.client.session.expired?).to eq(false)
  end
  end

data/spec/unit/spark_api/authentication/base_auth_spec.rb CHANGED
@@ -3,8 +3,8 @@ require './spec/spec_helper'
  describe SparkApi::Authentication::BaseAuth do
  subject {SparkApi::Authentication::BaseAuth.new(nil) }
  it "should raise an error" do
- expect {subject.authenticate()}.to raise_error(){ |e| e.message.should == "Implement me!"}
- expect {subject.logout()}.to raise_error(){ |e| e.message.should == "Implement me!"}
- expect {subject.request(nil, nil, nil, nil)}.to raise_error(){ |e| e.message.should == "Implement me!"}
+ expect {subject.authenticate()}.to raise_error(){ |e| expect(e.message).to eq("Implement me!")}
+ expect {subject.logout()}.to raise_error(){ |e| expect(e.message).to eq("Implement me!")}
+ expect {subject.request(nil, nil, nil, nil)}.to raise_error(){ |e| expect(e.message).to eq("Implement me!")}
  end
  end
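Note: the spec changes in this file and in the other spec files above and below are the mechanical migration from the RSpec 2 "should" expectation syntax (and Object#stub) to the RSpec 3 expect/allow syntax. A minimal before-and-after sketch; the result and client objects are illustrative:

    # RSpec 2 style (removed):
    #   result.should eq(200)
    #   client.stub(:delete) { true }
    #   lambda { client.authenticate }.should raise_error(RuntimeError)

    # RSpec 3 style used in 1.5.0:
    expect(result).to eq(200)
    allow(client).to receive(:delete) { true }
    expect { client.authenticate }.to raise_error(RuntimeError)
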
data/spec/unit/spark_api/authentication/oauth2_impl/faraday_middleware_spec.rb CHANGED
@@ -10,7 +10,7 @@ describe SparkApi::Authentication::OAuth2Impl::SparkbarFaradayMiddleware do
  :status => 201
  }
  subject.on_complete env
- env[:body]["token"].should eq("sp4rkb4rt0k3n")
+ expect(env[:body]["token"]).to eq("sp4rkb4rt0k3n")
  end

  it "should raise error on unsuccessful response" do
data/spec/unit/spark_api/authentication/oauth2_impl/grant_type_base_spec.rb CHANGED
@@ -4,6 +4,6 @@ describe SparkApi::Authentication::OAuth2Impl::GrantTypeBase do
  subject { SparkApi::Authentication::OAuth2Impl::GrantTypeBase }
  # Make sure the client boostraps the right plugin based on configuration.
  it "create should " do
- expect {subject.create(nil, InvalidAuth2Provider.new())}.to raise_error(SparkApi::ClientError){ |e| e.message.should == "Unsupported grant type [not_a_real_type]" }
+ expect {subject.create(nil, InvalidAuth2Provider.new())}.to raise_error(SparkApi::ClientError){ |e| expect(e.message).to eq("Unsupported grant type [not_a_real_type]") }
  end
  end
data/spec/unit/spark_api/authentication/oauth2_impl/single_session_provider_spec.rb CHANGED
@@ -3,7 +3,7 @@ require 'spec_helper'
  describe SparkApi::Authentication::SingleSessionProvider do
  subject { SparkApi::Authentication::SingleSessionProvider.new({ :access_token => "the_token" }) }
  it "should initialize a new session with access_token" do
- subject.load_session.should respond_to(:access_token)
- subject.load_session.access_token.should eq("the_token")
+ expect(subject.load_session).to respond_to(:access_token)
+ expect(subject.load_session.access_token).to eq("the_token")
  end
  end
data/spec/unit/spark_api/authentication/oauth2_spec.rb CHANGED
@@ -11,7 +11,7 @@ describe SparkApi::Authentication::OAuth2 do
  # Make sure the client boostraps the right plugin based on configuration.
  describe "plugin" do
  it "should load the oauth2 authenticator" do
- client.authenticator.class.should eq(SparkApi::Authentication::OAuth2)
+ expect(client.authenticator.class).to eq(SparkApi::Authentication::OAuth2)
  end
  end
  describe "authenticate" do
@@ -21,8 +21,8 @@ describe SparkApi::Authentication::OAuth2 do
  '{"client_id":"example-id","client_secret":"example-password","code":"my_code","grant_type":"authorization_code","redirect_uri":"https://exampleapp.fbsdata.com/oauth-callback"}'
  ).
  to_return(:body => fixture("oauth2/access.json"), :status=>200)
- subject.authenticate.access_token.should eq("04u7h-4cc355-70k3n")
- subject.authenticate.expires_in.should eq(57600)
+ expect(subject.authenticate.access_token).to eq("04u7h-4cc355-70k3n")
+ expect(subject.authenticate.expires_in).to eq(57600)
  end

  it "should raise an error when api credentials are invalid" do
@@ -31,7 +31,7 @@ describe SparkApi::Authentication::OAuth2 do
  '{"client_id":"example-id","client_secret":"example-password","code":"my_code","grant_type":"authorization_code","redirect_uri":"https://exampleapp.fbsdata.com/oauth-callback"}'
  ).
  to_return(:body => fixture("oauth2/error.json"), :status=>400)
- expect {subject.authenticate()}.to raise_error(SparkApi::ClientError){ |e| e.status.should == 400 }
+ expect {subject.authenticate()}.to raise_error(SparkApi::ClientError){ |e| expect(e.status).to eq(400) }
  end

  end
@@ -40,16 +40,16 @@ describe SparkApi::Authentication::OAuth2 do
  let(:session) { Object.new }
  it "should return true when session is active" do
  subject.session = session
- session.stub(:expired?) { false }
- subject.authenticated?.should eq(true)
+ allow(session).to receive(:expired?) { false }
+ expect(subject.authenticated?).to eq(true)
  end
  it "should return false when session is expired" do
  subject.session = session
- session.stub(:expired?) { true }
- subject.authenticated?.should eq(false)
+ allow(session).to receive(:expired?) { true }
+ expect(subject.authenticated?).to eq(false)
  end
  it "should return false when session is uninitialized" do
- subject.authenticated?.should eq(false)
+ expect(subject.authenticated?).to eq(false)
  end
  end

@@ -58,11 +58,11 @@ describe SparkApi::Authentication::OAuth2 do
  it "should logout when there is an active session" do
  subject.session = session
  subject.logout
- subject.session.should eq(nil)
+ expect(subject.session).to eq(nil)
  end
  it "should skip logging out when there is no active session information" do
- client.stub(:delete) { raise "Should not be called" }
- subject.logout.should eq(nil)
+ allow(client).to receive(:delete) { raise "Should not be called" }
+ expect(subject.logout).to eq(nil)
  end
  end

@@ -79,7 +79,7 @@ describe SparkApi::Authentication::OAuth2 do
  with(:query => args).
  to_return(:body => fixture("listings/no_subresources.json"))
  subject.session = session
- subject.request(:get, "/#{SparkApi.version}/listings", nil, args).status.should eq(200)
+ expect(subject.request(:get, "/#{SparkApi.version}/listings", nil, args).status).to eq(200)
  end
  it "should handle a post request" do
  subject.session = session
@@ -95,7 +95,7 @@ describe SparkApi::Authentication::OAuth2 do
  }]}
  }',
  :status=>201)
- subject.request(:post, "/#{SparkApi.version}/contacts", contact, args).status.should eq(201)
+ expect(subject.request(:post, "/#{SparkApi.version}/contacts", contact, args).status).to eq(201)
  end
  end

@@ -106,7 +106,7 @@ describe SparkApi::Authentication::OAuth2 do
  with(:body => "access_token=#{session.access_token}").
  to_return(:body => '{"token":"sp4rkb4rt0k3n"}')
  subject.session = session
- subject.sparkbar_token.should eq("sp4rkb4rt0k3n")
+ expect(subject.sparkbar_token).to eq("sp4rkb4rt0k3n")
  end
  it "should raise an error on missing sparkbar token" do
  c = stub_request(:post, "https://test.sparkplatform.com/appbar/authorize").
@@ -142,9 +142,9 @@ describe SparkApi::Authentication::OAuth2 do
  to_return(:body => fixture('errors/expired.json'), :status => 401).times(1).then.
  to_return(:body => fixture('listings/with_documents.json'))
  client.get("/listings/1234")
- count.should eq(1)
- refresh_count.should eq(1)
- client.session.expired?.should eq(false)
+ expect(count).to eq(1)
+ expect(refresh_count).to eq(1)
+ expect(client.session.expired?).to eq(false)
  end
  end
  context "and an invalid refresh token" do
@@ -164,8 +164,8 @@ describe SparkApi::Authentication::OAuth2 do
  to_return(:body => fixture('listings/with_documents.json'))

  client.get("/listings/1234")
- count.should eq(2)
- client.session.expired?.should eq(false)
+ expect(count).to eq(2)
+ expect(client.session.expired?).to eq(false)
  end
  end
  end
@@ -186,16 +186,16 @@ describe SparkApi::Authentication::OpenIdOAuth2Hybrid do
  end
  describe "plugin" do
  it "should load the hybrid authenticator" do
- client.authenticator.class.should eq(SparkApi::Authentication::OpenIdOAuth2Hybrid)
+ expect(client.authenticator.class).to eq(SparkApi::Authentication::OpenIdOAuth2Hybrid)
  end
  end

  describe "#authorization_url" do
  it "should include combined flow parameter" do
- client.authenticator.authorization_url.should match("openid.spark.combined_flow=true")
+ expect(client.authenticator.authorization_url).to match("openid.spark.combined_flow=true")
  end
  it "should allow custom parameters" do
- client.authenticator.authorization_url({"joshua" => "iscool"}).should match("joshua=iscool")
+ expect(client.authenticator.authorization_url({"joshua" => "iscool"})).to match("joshua=iscool")
  end
  end
  end
@@ -215,24 +215,24 @@ describe SparkApi::Authentication::OpenId do

  describe "plugin" do
  it "should not include combined flow parameter" do
- client.authenticator.authorization_url.should_not match("openid.spark.combined_flow=true")
+ expect(client.authenticator.authorization_url).not_to match("openid.spark.combined_flow=true")
  end
  it "should load the oauth2 authenticator" do
- client.authenticator.class.should eq(SparkApi::Authentication::OpenId)
+ expect(client.authenticator.class).to eq(SparkApi::Authentication::OpenId)
  end
  end

  describe "#authorization_url" do
  it "should allow custom parameters" do
- client.authenticator.authorization_url({"joshua" => "iscool"}).should match("joshua=iscool")
+ expect(client.authenticator.authorization_url({"joshua" => "iscool"})).to match("joshua=iscool")
  end
  end

  describe "forbidden methods" do
  it "should not allow authentication" do
- lambda {
+ expect {
  client.authenticate
- }.should raise_error(RuntimeError)
+ }.to raise_error(RuntimeError)
  end
  end
  end
@@ -240,12 +240,12 @@ end
  describe SparkApi::Authentication::BaseOAuth2Provider do
  context "session_timeout" do
  it "should provide a default" do
- subject.session_timeout.should eq(86400)
+ expect(subject.session_timeout).to eq(86400)
  end
  describe TestOAuth2Provider do
  subject { TestOAuth2Provider.new }
  it "should be able to override the session timeout" do
- subject.session_timeout.should eq(57600)
+ expect(subject.session_timeout).to eq(57600)
  end
  end
  end
@@ -260,18 +260,18 @@ describe "password authentication" do
  with(:body =>
  '{"client_id":"example-id","client_secret":"example-secret","grant_type":"password","password":"example-password","username":"example-user"}'
  ).to_return(:body => fixture("oauth2/access.json"), :status=>200)
- subject.authenticate.access_token.should eq("04u7h-4cc355-70k3n")
- subject.authenticate.expires_in.should eq(60)
+ expect(subject.authenticate.access_token).to eq("04u7h-4cc355-70k3n")
+ expect(subject.authenticate.expires_in).to eq(60)
  end
  end
  describe SparkApi::Authentication::OAuth2Impl do
  it "should load a provider" do
  example = "SparkApi::Authentication::OAuth2Impl::CLIProvider"
- SparkApi::Authentication::OAuth2Impl.load_provider(example,{}).class.to_s.should eq(example)
+ expect(SparkApi::Authentication::OAuth2Impl.load_provider(example,{}).class.to_s).to eq(example)
  prefix = "::#{example}"
- SparkApi::Authentication::OAuth2Impl.load_provider(prefix,{}).class.to_s.should eq(example)
+ expect(SparkApi::Authentication::OAuth2Impl.load_provider(prefix,{}).class.to_s).to eq(example)
  bad_example = "Derp::Derp::Derp::DerpProvider"
- expect{SparkApi::Authentication::OAuth2Impl.load_provider(bad_example,{}).class.to_s.should eq(bad_example)}.to raise_error(ArgumentError)
+ expect{expect(SparkApi::Authentication::OAuth2Impl.load_provider(bad_example,{}).class.to_s).to eq(bad_example)}.to raise_error(ArgumentError)
  end

  end
@@ -286,8 +286,8 @@ describe SparkApi::Authentication::OAuthSession do
  "start_time" => "2012-01-01T00:00:00+00:00"
  }
  session = SparkApi::Authentication::OAuthSession.new(args)
- session.start_time.should eq(DateTime.parse(args["start_time"]))
- JSON.parse(session.to_json).should eq(args)
+ expect(session.start_time).to eq(DateTime.parse(args["start_time"]))
+ expect(JSON.parse(session.to_json)).to eq(args)
  end

  it "should accept symbolized parameters" do
@@ -299,12 +299,12 @@ describe SparkApi::Authentication::OAuthSession do
  :start_time => "2012-01-01T00:00:00+00:00"
  }
  session = SparkApi::Authentication::OAuthSession.new(args)
- session.start_time.should eq(DateTime.parse(args[:start_time]))
- JSON.parse(session.to_json).should eq(JSON.parse(args.to_json))
+ expect(session.start_time).to eq(DateTime.parse(args[:start_time]))
+ expect(JSON.parse(session.to_json)).to eq(JSON.parse(args.to_json))
  end

  it "should not expire if expires_in is nil" do
  session = SparkApi::Authentication::OAuthSession.new
- session.expired?.should eq(false)
+ expect(session.expired?).to eq(false)
  end
  end