spark_api 1.4.28 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. checksums.yaml +5 -13
  2. data/README.md +1 -1
  3. data/VERSION +1 -1
  4. data/lib/spark_api/authentication/api_auth.rb +1 -1
  5. data/lib/spark_api/authentication/oauth2.rb +1 -1
  6. data/lib/spark_api/authentication/oauth2_impl/grant_type_base.rb +1 -1
  7. data/lib/spark_api/client.rb +2 -2
  8. data/lib/spark_api/models.rb +1 -0
  9. data/lib/spark_api/models/account.rb +7 -1
  10. data/lib/spark_api/models/account_report.rb +0 -5
  11. data/lib/spark_api/models/floplan.rb +24 -0
  12. data/lib/spark_api/models/listing.rb +11 -1
  13. data/lib/spark_api/models/subresource.rb +2 -2
  14. data/lib/spark_api/request.rb +2 -2
  15. data/script/reso_middleware_example.rb +70 -0
  16. data/spec/fixtures/listings/floplans_index.json +15 -0
  17. data/spec/spec_helper.rb +9 -4
  18. data/spec/unit/spark_api/authentication/api_auth_spec.rb +21 -22
  19. data/spec/unit/spark_api/authentication/base_auth_spec.rb +3 -3
  20. data/spec/unit/spark_api/authentication/oauth2_impl/faraday_middleware_spec.rb +1 -1
  21. data/spec/unit/spark_api/authentication/oauth2_impl/grant_type_base_spec.rb +1 -1
  22. data/spec/unit/spark_api/authentication/oauth2_impl/single_session_provider_spec.rb +2 -2
  23. data/spec/unit/spark_api/authentication/oauth2_spec.rb +40 -40
  24. data/spec/unit/spark_api/authentication_spec.rb +2 -2
  25. data/spec/unit/spark_api/configuration/yaml_spec.rb +44 -44
  26. data/spec/unit/spark_api/configuration_spec.rb +56 -57
  27. data/spec/unit/spark_api/faraday_middleware_spec.rb +12 -12
  28. data/spec/unit/spark_api/models/account_report_spec.rb +2 -22
  29. data/spec/unit/spark_api/models/account_spec.rb +24 -21
  30. data/spec/unit/spark_api/models/activity_spec.rb +5 -5
  31. data/spec/unit/spark_api/models/base_spec.rb +32 -32
  32. data/spec/unit/spark_api/models/concerns/destroyable_spec.rb +2 -2
  33. data/spec/unit/spark_api/models/concerns/savable_spec.rb +19 -19
  34. data/spec/unit/spark_api/models/connect_prefs_spec.rb +1 -1
  35. data/spec/unit/spark_api/models/constraint_spec.rb +1 -1
  36. data/spec/unit/spark_api/models/contact_spec.rb +50 -50
  37. data/spec/unit/spark_api/models/dirty_spec.rb +12 -12
  38. data/spec/unit/spark_api/models/document_spec.rb +3 -3
  39. data/spec/unit/spark_api/models/fields_spec.rb +17 -17
  40. data/spec/unit/spark_api/models/finders_spec.rb +7 -7
  41. data/spec/unit/spark_api/models/floplan_spec.rb +24 -0
  42. data/spec/unit/spark_api/models/listing_cart_spec.rb +46 -46
  43. data/spec/unit/spark_api/models/listing_meta_translations_spec.rb +6 -6
  44. data/spec/unit/spark_api/models/listing_spec.rb +91 -91
  45. data/spec/unit/spark_api/models/message_spec.rb +10 -10
  46. data/spec/unit/spark_api/models/note_spec.rb +10 -10
  47. data/spec/unit/spark_api/models/notification_spec.rb +6 -6
  48. data/spec/unit/spark_api/models/open_house_spec.rb +4 -4
  49. data/spec/unit/spark_api/models/photo_spec.rb +8 -8
  50. data/spec/unit/spark_api/models/portal_spec.rb +4 -4
  51. data/spec/unit/spark_api/models/property_types_spec.rb +5 -5
  52. data/spec/unit/spark_api/models/rental_calendar_spec.rb +13 -11
  53. data/spec/unit/spark_api/models/rule_spec.rb +2 -2
  54. data/spec/unit/spark_api/models/saved_search_spec.rb +33 -33
  55. data/spec/unit/spark_api/models/search_template/quick_search_spec.rb +5 -5
  56. data/spec/unit/spark_api/models/shared_listing_spec.rb +12 -12
  57. data/spec/unit/spark_api/models/sort_spec.rb +3 -3
  58. data/spec/unit/spark_api/models/standard_fields_spec.rb +12 -12
  59. data/spec/unit/spark_api/models/subresource_spec.rb +33 -15
  60. data/spec/unit/spark_api/models/system_info_spec.rb +7 -7
  61. data/spec/unit/spark_api/models/tour_of_home_spec.rb +3 -3
  62. data/spec/unit/spark_api/models/video_spec.rb +9 -9
  63. data/spec/unit/spark_api/models/virtual_tour_spec.rb +7 -7
  64. data/spec/unit/spark_api/models/vow_account_spec.rb +8 -8
  65. data/spec/unit/spark_api/multi_client_spec.rb +14 -14
  66. data/spec/unit/spark_api/options_hash_spec.rb +4 -4
  67. data/spec/unit/spark_api/paginate_spec.rb +71 -71
  68. data/spec/unit/spark_api/primary_array_spec.rb +5 -5
  69. data/spec/unit/spark_api/request_spec.rb +65 -59
  70. data/spec/unit/spark_api_spec.rb +6 -6
  71. metadata +215 -280
checksums.yaml CHANGED
@@ -1,15 +1,7 @@
1
1
  ---
2
- !binary "U0hBMQ==":
3
- metadata.gz: !binary |-
4
- YzJlMDhlOWU3YmE2MjRlOWI1YzUxZGRhZmQ2ODE1MWM3NTVhNGFhZQ==
5
- data.tar.gz: !binary |-
6
- OTBkZWY3NzI2NGFlODdjOGY2N2JkMmM0NDlhNWIxZTZjMDhkMGIwOQ==
2
+ SHA256:
3
+ metadata.gz: 17eaaa74af21a3a858941c286e4731ae9d67d1b315f06f5a3409d78d7d4fa89e
4
+ data.tar.gz: 51373eaa638eefed5245d88d78cc0e1374d75cdfe385c3c358d4d40348059514
7
5
  SHA512:
8
- metadata.gz: !binary |-
9
- Zjc1MGQ4NmY0NWE0MjNmYTUzYmMzNzM4MTcxY2VmY2ZkODE1YTgxMjYwZjU3
10
- NmIxMmVkZjI1OGYyMmM4NDAwMWU3ZmIwNzUwOWU2MmFmNDdmN2U1NDcwMTQ3
11
- NTY5NjM0NmQzNTAzNzQ1NDUwYmYzMDFmZjg3NWRiYmQxNzk3YzU=
12
- data.tar.gz: !binary |-
13
- NDc5YTFmNzVlNjcwMWRjNzE2NzI2ZDdmYjVmNzg0ZjQ5ZjBmNDA1MThmMzNh
14
- YTI0NjIxYzZkYjY2YTA0NDVkMzA0ZWU3M2ZjMmNiMjY0Y2E1NDgzMTY0ZWYx
15
- MjEwNTAxNmI2ZjMxNjk2YTBkZjc0Y2VhNzExZTY1NDE0NzU0NDI=
6
+ metadata.gz: 7d81cd63e089a1182557cdeb95ab1e52d526904879294f66e22026c68d3dd4dd5ec8408763fd9d62ac72a9fcf23964ee9eec3d5e6e104a214e33c5a46ece21f7
7
+ data.tar.gz: c44265855abe3384dea240410e96230b68fca9f9ac3cef702feb1ff0e05d026d571833591d5fa4865c21f04dc9f48d7f2987ffc9895088ac5c4431d73fa9b3c0
data/README.md CHANGED
@@ -1,6 +1,6 @@
1
1
  Spark API
2
2
  =====================
3
- [![Build Status](https://travis-ci.org/sparkapi/spark_api.png?branch=master)](http://travis-ci.org/sparkapi/spark_api) [![Code Climate](https://codeclimate.com/badge.png)](https://codeclimate.com/github/sparkapi/spark_api)
3
+ ![CI](https://github.com/sparkapi/spark_api/workflows/CI/badge.svg) ![Code Climate](https://codeclimate.com/badge.png)
4
4
 
5
5
  A Ruby wrapper for the Spark REST API. Loosely based on ActiveResource to provide models to interact with remote services.
6
6
 
data/VERSION CHANGED
@@ -1 +1 @@
1
- 1.4.28
1
+ 1.5.0
@@ -62,7 +62,7 @@ module SparkApi
62
62
 
63
63
  # Perform an HTTP request (no data)
64
64
  def request(method, path, body, options)
65
- escaped_path = URI.escape(path)
65
+ escaped_path = Addressable::URI.escape(path)
66
66
  request_opts = {
67
67
  :AuthToken => @session.auth_token
68
68
  }
@@ -40,7 +40,7 @@ module SparkApi
40
40
 
41
41
  # Perform an HTTP request (no data)
42
42
  def request(method, path, body, options={})
43
- escaped_path = URI.escape(path)
43
+ escaped_path = Addressable::URI.escape(path)
44
44
  connection = @client.connection(true) # SSL Only!
45
45
  connection.headers.merge!(self.auth_header)
46
46
 
@@ -45,7 +45,7 @@ module SparkApi
45
45
  response.expires_in = provider.session_timeout if response.expires_in.nil?
46
46
  SparkApi.logger.debug { "[oauth2] New session created #{response}" }
47
47
  response
48
- rescue Faraday::Error::ConnectionFailed => e
48
+ rescue Faraday::ConnectionFailed => e
49
49
  if @client.ssl_verify && e.message =~ /certificate verify failed/
50
50
  SparkApi.logger.error { SparkApi::Errors.ssl_verification_error }
51
51
  end
@@ -2,7 +2,7 @@ module SparkApi
2
2
  # =API Client
3
3
  # Main class to setup and run requests on the API. A default client is accessible globally as
4
4
  # SparkApi::client if the global configuration has been set as well. Otherwise, this class may
5
- # be instanciated separately with the configuration information.
5
+ # be instantiated separately with the configuration information.
6
6
  class Client
7
7
  include Connection
8
8
  include Authentication
@@ -21,7 +21,7 @@ module SparkApi
21
21
  Configuration::VALID_OPTION_KEYS.each do |key|
22
22
  send("#{key}=", options[key])
23
23
  end
24
- # Instanciate the authenication class passed in.
24
+ # Instantiate the authentication class passed in.
25
25
  @authenticator = authentication_mode.send("new", self)
26
26
  end
27
27
 
@@ -20,6 +20,7 @@ require 'spark_api/models/fields'
20
20
  require 'spark_api/models/idx'
21
21
  require 'spark_api/models/idx_link'
22
22
  require 'spark_api/models/incomplete_listing'
23
+ require 'spark_api/models/floplan'
23
24
  require 'spark_api/models/listing'
24
25
  require 'spark_api/models/listing_cart'
25
26
  require 'spark_api/models/listing_meta_translations'
@@ -52,7 +52,13 @@ module SparkApi
52
52
  nil
53
53
  end
54
54
  end
55
-
55
+
56
+ def logo
57
+ if images.kind_of? Array
58
+ images.find { |image| image.Type == "Logo" }
59
+ end
60
+ end
61
+
56
62
  def save(arguments={})
57
63
  self.errors = [] # clear the errors hash
58
64
  begin
@@ -21,11 +21,6 @@ module SparkApi
21
21
  end
22
22
  end
23
23
 
24
- def logo
25
- if images.kind_of? Array
26
- images.find { |image| image.Type == "Logo" }
27
- end
28
- end
29
24
  end
30
25
  end
31
26
  end
@@ -0,0 +1,24 @@
1
+ module SparkApi
2
+ module Models
3
+ class FloPlan < Base
4
+ extend Subresource
5
+ self.element_name = 'floplans'
6
+
7
+ attr_accessor :images, :thumbnails
8
+
9
+ def initialize(attributes={})
10
+ @images = []
11
+ @thumbnails = []
12
+
13
+ attributes['Images'].each do |img|
14
+ if img["Type"].include?('thumbnail')
15
+ @thumbnails << img
16
+ else
17
+ @images << img
18
+ end
19
+ end
20
+ super(attributes)
21
+ end
22
+ end
23
+ end
24
+ end
@@ -2,7 +2,7 @@ module SparkApi
2
2
  module Models
3
3
  class Listing < Base
4
4
  extend Finders
5
- attr_accessor :photos, :videos, :virtual_tours, :documents, :open_houses, :tour_of_homes, :rental_calendars
5
+ attr_accessor :photos, :videos, :virtual_tours, :documents, :open_houses, :tour_of_homes, :rental_calendars, :floplans
6
6
  attr_accessor :constraints
7
7
  self.element_name="listings"
8
8
  DATA_MASK = "********"
@@ -17,6 +17,7 @@ module SparkApi
17
17
  @constraints = []
18
18
  @tour_of_homes = []
19
19
  @open_houses = []
20
+ @floplans = []
20
21
 
21
22
  if attributes.has_key?('StandardFields')
22
23
  pics, vids, tours, docs, ohouses, tourhomes = attributes['StandardFields'].values_at('Photos','Videos', 'VirtualTours', 'Documents', 'OpenHouses', 'TourOfHomes')
@@ -26,6 +27,10 @@ module SparkApi
26
27
  rentalcalendars = attributes['RentalCalendar']
27
28
  end
28
29
 
30
+ if attributes.has_key?('FloPlans')
31
+ floplans = attributes['FloPlans']
32
+ end
33
+
29
34
  if pics != nil
30
35
  setup_attribute(@photos, pics, Photo)
31
36
  attributes['StandardFields'].delete('Photos')
@@ -61,6 +66,11 @@ module SparkApi
61
66
  attributes.delete('RentalCalendar')
62
67
  end
63
68
 
69
+ if floplans != nil
70
+ setup_attribute(@floplans, floplans, FloPlan)
71
+ attributes.delete('FloPlans')
72
+ end
73
+
64
74
  super(attributes)
65
75
  end
66
76
 
@@ -16,10 +16,10 @@ module SparkApi
16
16
 
17
17
  def parse_date_start_and_end_times(attributes)
18
18
  # Transform the date strings
19
- unless attributes['Date'].nil?
19
+ unless attributes['Date'].nil? || attributes['Date'].empty?
20
20
  date = Date.strptime attributes['Date'], '%m/%d/%Y'
21
21
  ['StartTime','EndTime'].each do |time|
22
- next if attributes[time].nil?
22
+ next if attributes[time].nil? || attributes[time].empty?
23
23
  formatted_date = "#{attributes['Date']}T#{attributes[time]}"
24
24
  datetime = nil
25
25
 
@@ -99,7 +99,7 @@ module SparkApi
99
99
  else
100
100
  return response.body
101
101
  end
102
- rescue Faraday::Error::ConnectionFailed => e
102
+ rescue Faraday::ConnectionFailed => e
103
103
  if self.ssl_verify && e.message =~ /certificate verify failed/
104
104
  SparkApi.logger.error { SparkApi::Errors.ssl_verification_error }
105
105
  end
@@ -107,7 +107,7 @@ module SparkApi
107
107
  end
108
108
 
109
109
  def process_request_body(body)
110
- if body.is_a?(Hash)
110
+ if body.is_a?(Hash) || body.is_a?(Array)
111
111
  body.empty? ? nil : {"D" => body }.to_json
112
112
  else
113
113
  body
@@ -0,0 +1,70 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ # This script demonstrates how to use the RESO Web API with the Ruby client by pulling listings
4
+ # and replacing encoded field values with their corresponding human-readable values, which are
5
+ # pulled from the XML returned by the RESO metadata endpoint.
6
+
7
+ require "spark_api"
8
+ require "nokogiri"
9
+
10
+ # set up session and RESO Web API middleware
11
+ SparkApi.configure do |config|
12
+ config.authentication_mode = SparkApi::Authentication::OAuth2
13
+ config.middleware = :reso_api
14
+ end
15
+
16
+ SparkApi.client.session = SparkApi::Authentication::OAuthSession.new({ :access_token => "OAUTH2_ACCESS_TOKEN" })
17
+
18
+ # pull metadata from RESO Web API
19
+ metadata_res = (SparkApi.client.get("/$metadata", {:$ApiUser => "FLEXMLS_TECH_ID"}) )
20
+ metadata_xml = Nokogiri::XML(metadata_res).remove_namespaces!
21
+
22
+ # make an array of fields which need to be checked for readable values
23
+ fields_to_lookup = []
24
+ metadata_xml.xpath('//Schema/EnumType/@Name').each do |el|
25
+ fields_to_lookup << el.to_str
26
+ end
27
+
28
+ # get 25 listings
29
+ listings = (SparkApi.client.get("/Property", {:$top => 25, :$ApiUser => "FLEXMLS_TECH_ID"} ))
30
+
31
+ listings['value'].each do |listing| # for each listing,
32
+ fields_to_lookup.each do |field| # go through the array of fields to be checked.
33
+ if !!listing[field] # when one of the fields that needs to be checked exists in a listing,
34
+ if listing[field].is_a? String
35
+ readable = metadata_xml.xpath( # check for readable value to be swapped in
36
+ "//Schema/
37
+ EnumType[@Name=\"#{field}\"]/
38
+ Member[@Name=\"#{listing[field]}\"]/
39
+ Annotation"
40
+ ).attr("String")
41
+
42
+ # if there is a readable value, swap it in
43
+ if !!readable
44
+ listing[field] = readable.to_str
45
+ end
46
+
47
+ elsif listing[field].is_a? Array
48
+ readable_arr = []
49
+ listing[field].each do |el|
50
+ readable = metadata_xml.xpath( # check for readable value to be swapped in
51
+ "//Schema/
52
+ EnumType[@Name=\"#{field}\"]/
53
+ Member[@Name=\"#{el}\"]/
54
+ Annotation"
55
+ ).attr("String")
56
+
57
+ # assemble a new array with readable values and swap it in
58
+ if !!readable
59
+ readable_arr << readable.to_str
60
+ else
61
+ readable_arr << el
62
+ end
63
+ listing[field] = readable_arr
64
+ end
65
+ end
66
+ end
67
+ end
68
+ end
69
+
70
+ puts listings
@@ -0,0 +1,15 @@
1
+ {
2
+ "D":{
3
+ "Results":[{
4
+ "Id": 4300,
5
+ "Name": "Feb 13",
6
+ "Images": [
7
+ {"Uri": "https://foo.bar",
8
+ "Type":"all_in_one_png"},
9
+ {"Uri": "https://foo.bar",
10
+ "Type": "all_in_one_thumbnail_png"}
11
+ ],
12
+ "Success":true
13
+ }]
14
+ }
15
+ }
@@ -11,7 +11,6 @@ end
11
11
 
12
12
  require "rubygems"
13
13
  require "rspec"
14
- require 'rspec/autorun'
15
14
  require 'webmock/rspec'
16
15
  require "json"
17
16
  require 'multi_json'
@@ -22,6 +21,13 @@ require path + '/spark_api'
22
21
 
23
22
  require 'spark_api'
24
23
 
24
+ if ENV['COVERAGE'] == "on"
25
+ require 'simplecov'
26
+ require 'simplecov-rcov'
27
+ SimpleCov.formatter = SimpleCov::Formatter::RcovFormatter
28
+ SimpleCov.start { add_filter %w(/vendor /spec /test) }
29
+ end
30
+
25
31
  FileUtils.mkdir 'log' unless File.exists? 'log'
26
32
 
27
33
  module SparkApi
@@ -48,17 +54,16 @@ end
48
54
  Dir[File.expand_path(File.join(File.dirname(__FILE__),'support','**','*.rb'))].each {|f| require f}
49
55
 
50
56
  RSpec.configure do |config|
51
-
57
+
52
58
  config.include WebMock::API
53
59
  config.include StubApiRequests
54
60
 
55
- config.treat_symbols_as_metadata_keys_with_true_values = true
56
61
  config.alias_example_to :on_get_it, :method => 'GET'
57
62
  config.alias_example_to :on_put_it, :method => 'PUT'
58
63
  config.alias_example_to :on_post_it, :method => 'POST'
59
64
  config.alias_example_to :on_delete_it, :method => 'DELETE'
60
65
  config.before(:all) { reset_config }
61
- config.color_enabled = true
66
+ config.color = true
62
67
  end
63
68
 
64
69
  def jruby?
@@ -4,15 +4,14 @@ describe SparkApi::Authentication::ApiAuth do
4
4
  subject {SparkApi::Authentication::ApiAuth.new(nil) }
5
5
  describe "build_param_hash" do
6
6
  it "should return a blank string when passed nil" do
7
- subject.build_param_string(nil).should be_empty
7
+ expect(subject.build_param_string(nil)).to be_empty
8
8
  end
9
9
  it "should return a correct param string for one item" do
10
- subject.build_param_string({:foo => "bar"}).should match("foobar")
10
+ expect(subject.build_param_string({:foo => "bar"})).to match("foobar")
11
11
  end
12
12
  it "should alphabatize the param names by key first, then by value" do
13
- subject.build_param_string({:zoo => "zar", :ooo => "car"}).should match("ooocarzoozar")
14
- subject.build_param_string({:Akey => "aValue", :aNotherkey => "AnotherValue"}).should
15
- match "AkeyaValueaNotherkeyAnotherValue"
13
+ expect(subject.build_param_string({:zoo => "zar", :ooo => "car"})).to match("ooocarzoozar")
14
+ expect(subject.build_param_string({:Akey => "aValue", :aNotherkey => "AnotherValue"})).to match("AkeyaValueaNotherkeyAnotherValue")
16
15
  end
17
16
  end
18
17
 
@@ -33,7 +32,7 @@ describe SparkApi::Authentication::ApiAuth do
33
32
  stub_request(:post, "https://api.sparkapi.com/#{SparkApi.version}/session").
34
33
  with(:query => {:ApiKey => "my_key", :ApiSig => "c731cf2455fbc7a4ef937b2301108d7a"}).
35
34
  to_return(:body => fixture("authentication_failure.json"), :status=>401)
36
- expect {subject.authenticate()}.to raise_error(SparkApi::ClientError){ |e| e.status.should == 401 }
35
+ expect {subject.authenticate()}.to raise_error(SparkApi::ClientError){ |e| expect(e.status).to eq(401) }
37
36
  end
38
37
  end
39
38
 
@@ -41,16 +40,16 @@ describe SparkApi::Authentication::ApiAuth do
41
40
  let(:session) { Object.new }
42
41
  it "should return true when session is active" do
43
42
  subject.session = session
44
- session.stub(:expired?) { false }
45
- subject.authenticated?.should eq(true)
43
+ allow(session).to receive(:expired?) { false }
44
+ expect(subject.authenticated?).to eq(true)
46
45
  end
47
46
  it "should return false when session is expired" do
48
47
  subject.session = session
49
- session.stub(:expired?) { true }
50
- subject.authenticated?.should eq(false)
48
+ allow(session).to receive(:expired?) { true }
49
+ expect(subject.authenticated?).to eq(false)
51
50
  end
52
51
  it "should return false when session is uninitialized" do
53
- subject.authenticated?.should eq(false)
52
+ expect(subject.authenticated?).to eq(false)
54
53
  end
55
54
  end
56
55
 
@@ -61,14 +60,14 @@ describe SparkApi::Authentication::ApiAuth do
61
60
  it "should logout when there is an active session" do
62
61
  logged_out = false
63
62
  subject.session = session
64
- client.stub(:delete).with("/session/1234") { logged_out = true }
63
+ allow(client).to receive(:delete).with("/session/1234") { logged_out = true }
65
64
  subject.logout
66
- subject.session.should eq(nil)
67
- logged_out.should eq(true)
65
+ expect(subject.session).to eq(nil)
66
+ expect(logged_out).to eq(true)
68
67
  end
69
68
  it "should skip logging out when there is no active session information" do
70
- client.stub(:delete) { raise "Should not be called" }
71
- subject.logout.should eq(nil)
69
+ allow(client).to receive(:delete) { raise "Should not be called" }
70
+ expect(subject.logout).to eq(nil)
72
71
  end
73
72
  end
74
73
 
@@ -98,7 +97,7 @@ describe SparkApi::Authentication::ApiAuth do
98
97
  :AuthToken => "1234"}.merge(args)).
99
98
  to_return(:body => fixture("listings/no_subresources.json"))
100
99
  subject.session = session
101
- subject.request(:get, "/#{SparkApi.version}/listings", nil, args).status.should eq(200)
100
+ expect(subject.request(:get, "/#{SparkApi.version}/listings", nil, args).status).to eq(200)
102
101
  end
103
102
  it "should handle a post request" do
104
103
  stub_auth_request
@@ -118,14 +117,14 @@ describe SparkApi::Authentication::ApiAuth do
118
117
  }]}
119
118
  }',
120
119
  :status=>201)
121
- subject.request(:post, "/#{SparkApi.version}/contacts", contact, args).status.should eq(201)
120
+ expect(subject.request(:post, "/#{SparkApi.version}/contacts", contact, args).status).to eq(201)
122
121
  end
123
122
  end
124
123
 
125
124
  describe "sign" do
126
125
  it "should sign the auth parameters correctly" do
127
126
  sign_token = "my_secretApiKeymy_key"
128
- subject.sign(sign_token).should eq("c731cf2455fbc7a4ef937b2301108d7a")
127
+ expect(subject.sign(sign_token)).to eq("c731cf2455fbc7a4ef937b2301108d7a")
129
128
  end
130
129
  end
131
130
 
@@ -134,7 +133,7 @@ describe SparkApi::Authentication::ApiAuth do
134
133
  subject {SparkApi::Authentication::ApiAuth.new(client) }
135
134
  it "should fully sign the token" do
136
135
  parms = {:AuthToken => "1234", :ApiUser => "CoolAsIce"}
137
- subject.sign_token("/test", parms).should eq("7bbe3384a8b64368357f8551cab271e3")
136
+ expect(subject.sign_token("/test", parms)).to eq("7bbe3384a8b64368357f8551cab271e3")
138
137
  end
139
138
  end
140
139
 
@@ -160,8 +159,8 @@ describe SparkApi::Authentication::ApiAuth do
160
159
  to_return(:body => fixture('listings/with_documents.json'))
161
160
  l = Listing.find('1234', :_expand => "Documents")
162
161
 
163
- count.should eq(2)
164
- SparkApi.client.session.expired?.should eq(false)
162
+ expect(count).to eq(2)
163
+ expect(SparkApi.client.session.expired?).to eq(false)
165
164
  end
166
165
  end
167
166