spark_api 1.1.0 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/VERSION +1 -1
- data/lib/spark_api/authentication/api_auth.rb +3 -3
- data/lib/spark_api/authentication/oauth2_impl/grant_type_code.rb +8 -7
- data/lib/spark_api/authentication/oauth2_impl/grant_type_password.rb +4 -3
- data/lib/spark_api/authentication/oauth2_impl/grant_type_refresh.rb +3 -2
- data/lib/spark_api/configuration/yaml.rb +8 -6
- data/lib/spark_api/models/listing_cart.rb +1 -1
- data/lib/spark_api/models/open_house.rb +1 -9
- data/lib/spark_api/models/subresource.rb +20 -0
- data/lib/spark_api/models/tour_of_home.rb +1 -7
- data/spec/fixtures/contacts/new.json +2 -2
- data/spec/fixtures/contacts/new_empty.json +2 -2
- data/spec/fixtures/listing_carts/new.json +2 -2
- data/spec/fixtures/listings/photos/new.json +3 -3
- data/spec/fixtures/listings/tour_of_homes.json +4 -4
- data/spec/json_hash_test_support.rb +251 -0
- data/spec/mock_helper.rb +5 -5
- data/spec/spec_helper.rb +2 -0
- data/spec/unit/spark_api/authentication/oauth2_spec.rb +18 -16
- data/spec/unit/spark_api/models/listing_cart_spec.rb +1 -2
- data/spec/unit/spark_api/models/open_house_spec.rb +10 -4
- data/spec/unit/spark_api/models/tour_of_home_spec.rb +8 -2
- metadata +13 -11
data/VERSION
CHANGED
@@ -1 +1 @@
-1.1.0
+1.1.1
data/lib/spark_api/authentication/api_auth.rb
CHANGED
@@ -38,12 +38,12 @@ module SparkApi
     # specifically for signing a request.
     def build_param_string(param_hash)
       return "" if param_hash.nil?
-      sorted = param_hash.sort do |a,b|
+      sorted = param_hash.keys.sort do |a,b|
         a.to_s <=> b.to_s
       end
       params = ""
-      sorted.each do |key
-      params += key.to_s +
+      sorted.each do |key|
+        params += key.to_s + param_hash[key].to_s
       end
       params
     end
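Note on the api_auth.rb change above: Hash#sort yields [key, value] pairs, so the old loop iterated pairs rather than keys when building the signing string. Sorting the keys and appending each key followed by its value restores the string the API expects. A minimal standalone sketch of the corrected behavior (parameter names and values are illustrative):

    def build_param_string(param_hash)
      return "" if param_hash.nil?
      # Sort keys (not [key, value] pairs), then append each key followed by its value.
      sorted = param_hash.keys.sort { |a, b| a.to_s <=> b.to_s }
      sorted.map { |key| key.to_s + param_hash[key].to_s }.join
    end

    build_param_string(:AuthToken => "xyz", :ApiUser => "foobar")
    # => "ApiUserfoobarAuthTokenxyz"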
data/lib/spark_api/authentication/oauth2_impl/grant_type_code.rb
CHANGED
@@ -32,13 +32,14 @@ module SparkApi
 
     private
     def token_params
-
-
-
-
-
-
-
+      hash = {
+        "client_id" => @provider.client_id,
+        "client_secret" => @provider.client_secret,
+        "code" => @provider.code,
+        "grant_type" => "authorization_code",
+        "redirect_uri" => @provider.redirect_uri
+      }
+      MultiJson.dump(hash)
     end
 
   end
data/lib/spark_api/authentication/oauth2_impl/grant_type_password.rb
CHANGED
@@ -26,13 +26,14 @@ module SparkApi
 
     private
     def token_params
-
+      hash = {
         "client_id" => @provider.client_id,
         "client_secret" => @provider.client_secret,
         "grant_type" => "password",
-        "username" => @provider.username,
         "password" => @provider.password,
-
+        "username" => @provider.username
+      }
+      MultiJson.dump(hash)
     end
   end
 end
data/lib/spark_api/authentication/oauth2_impl/grant_type_refresh.rb
CHANGED
@@ -20,12 +20,13 @@ module SparkApi
 
     private
     def token_params
-      @params.merge({
+      hash = @params.merge({
         "client_id" => @provider.client_id,
         "client_secret" => @provider.client_secret,
         "grant_type" => "refresh_token",
         "refresh_token"=> session.refresh_token,
-      })
+      })
+      MultiJson.dump(hash)
     end
   end
 
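Taken together, the three grant-type changes above make token_params build a plain Hash and serialize it with MultiJson.dump, so values are escaped correctly and all three token request bodies are produced the same way. A sketch of what the password grant now posts (credential values are illustrative, matching the spec fixtures further down):

    require 'multi_json'

    hash = {
      "client_id"     => "example-id",
      "client_secret" => "example-secret",
      "grant_type"    => "password",
      "password"      => "example-password",
      "username"      => "example-user"
    }
    MultiJson.dump(hash)
    # => '{"client_id":"example-id","client_secret":"example-secret",...}'
    # (key order is only deterministic under the MultiJson ordering patch added below)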
data/lib/spark_api/configuration/yaml.rb
CHANGED
@@ -31,13 +31,15 @@ module SparkApi
       @name
     end
     def api_env
-
-
-
-
-
+      if env.include? "SPARK_API_ENV"
+        env["SPARK_API_ENV"]
+      elsif env.include? "RAILS_ENV"
+        env["RAILS_ENV"]
+      elsif env.include? "RACK_ENV"
+        env["RACK_ENV"]
+      else
+        "development"
       end
-      return current_env
     end
 
     # Used to specify the root of where to look for SparkApi config files
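The rewritten api_env resolves the environment name by precedence: SPARK_API_ENV wins over RAILS_ENV, which wins over RACK_ENV, with "development" as the fallback. A standalone sketch, taking the env source as a parameter instead of the class's own env accessor:

    def api_env(env = ENV)
      if env.include? "SPARK_API_ENV"
        env["SPARK_API_ENV"]
      elsif env.include? "RAILS_ENV"
        env["RAILS_ENV"]
      elsif env.include? "RACK_ENV"
        env["RACK_ENV"]
      else
        "development"
      end
    end

    api_env({})                                                   # => "development"
    api_env("RACK_ENV" => "test")                                 # => "test"
    api_env("SPARK_API_ENV" => "qa", "RAILS_ENV" => "production") # => "qa"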
data/lib/spark_api/models/listing_cart.rb
CHANGED
@@ -63,7 +63,7 @@ module SparkApi
       true
     end
     def update!(arguments={})
-      results = connection.put "#{self.class.path}/#{self.Id}", {"ListingCarts" => [ {"
+      results = connection.put "#{self.class.path}/#{self.Id}", {"ListingCarts" => [ {"ListingIds" => attributes["ListingIds"],"Name" => attributes["Name"]} ] }, arguments
       true
     end
 
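ListingCart#update! now sends the complete cart payload, both ListingIds and Name, in a single PUT. A sketch of the body it builds (the ID and name below are borrowed from listing_cart_spec.rb and are illustrative):

    attributes = {
      "ListingIds" => ["20110112234857732941000000"],
      "Name"       => "My Cart's Name"
    }
    payload = { "ListingCarts" => [ { "ListingIds" => attributes["ListingIds"],
                                      "Name"       => attributes["Name"] } ] }
    # connection.put "#{self.class.path}/#{self.Id}", payload, arguments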
data/lib/spark_api/models/open_house.rb
CHANGED
@@ -9,18 +9,10 @@ module SparkApi
     self.element_name = "openhouses"
 
     def initialize(attributes={})
-
-      unless attributes['Date'].nil?
-        date = Date.parse(attributes['Date'])
-        attributes['Date'] = date
-        attributes['StartTime'] = Time.parse("#{date}T#{attributes['StartTime']}") unless attributes['StartTime'].nil?
-        attributes['EndTime'] = Time.parse("#{date}T#{attributes['EndTime']}") unless attributes['EndTime'].nil?
-      end
-
+      self.class.parse_date_start_and_end_times attributes
       if attributes["Comments"].nil?
         attributes["Comments"] = ""
       end
-
       super(attributes)
     end
 
data/lib/spark_api/models/subresource.rb
CHANGED
@@ -14,6 +14,26 @@ module SparkApi
       collect(connection.get("/listings/#{parent_id}#{self.path}/#{id}", arguments)).first
     end
 
+    def parse_date_start_and_end_times(attributes)
+      # Transform the date strings
+      unless attributes['Date'].nil?
+        date = Date.strptime attributes['Date'], '%m/%d/%Y'
+        ['StartTime','EndTime'].each do |time|
+          next if attributes[time].nil?
+          format = '%m/%d/%YT%H:%M%z'
+          if attributes[time].split(':').size > 3
+            format = '%m/%d/%YT%H:%M:%S%z'
+          end
+          formatted_date = "#{attributes['Date']}T#{attributes[time]}"
+          datetime = DateTime.strptime(formatted_date, format)
+          datetime = datetime.new_offset DateTime.now.offset
+          attributes[time] = Time.local(datetime.year, datetime.month, datetime.day, datetime.hour, datetime.min,
+            datetime.sec)
+        end
+        attributes['Date'] = date
+      end
+    end
+
     end
   end
 end
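parse_date_start_and_end_times centralizes the date handling that OpenHouse and TourOfHome previously duplicated: 'Date' arrives as MM/DD/YYYY, the times carry a UTC offset and may or may not include seconds, and both times end up as local-zone Time objects. An illustrative call using the fixture values from tour_of_homes.json below; the models invoke it as self.class.parse_date_start_and_end_times, so the Subresource module is presumably extended onto each model class:

    require 'date'
    require 'spark_api'

    attrs = {
      'Date'      => '10/01/2010',
      'StartTime' => '09:00-07:00',
      'EndTime'   => '12:00-07:00'
    }
    SparkApi::Models::OpenHouse.parse_date_start_and_end_times(attrs)
    attrs['Date']      # => #<Date: 2010-10-01>
    attrs['StartTime'] # => the local-zone Time equivalent of 2010-10-01 09:00 -0700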
data/lib/spark_api/models/tour_of_home.rb
CHANGED
@@ -9,13 +9,7 @@ module SparkApi
     self.element_name = "tourofhomes"
 
     def initialize(attributes={})
-
-      unless attributes['Date'].nil?
-        date = Date.parse(attributes['Date'])
-        attributes['Date'] = date
-        attributes['StartTime'] = Time.parse("#{date}T#{attributes['StartTime']}") unless attributes['StartTime'].nil?
-        attributes['EndTime'] = Time.parse("#{date}T#{attributes['EndTime']}") unless attributes['EndTime'].nil?
-      end
+      self.class.parse_date_start_and_end_times attributes
       super(attributes)
     end
 
data/spec/fixtures/listings/photos/new.json
CHANGED
@@ -2,10 +2,10 @@
   "D": {
     "Photos": [
       {
-        "
-        "Name": "FBS Logo",
+        "Picture": "iVBORw0KGgoAAAANSUhEUgAAANAAAAAuCAYAAABZPJcdAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAC2tJREFUeNrsXWtwG9UVXjf7rzPx+pXp8CfiFwXa8TrkQZK2lqCBpIVahj6AAJZ4BGhpbQUolEdlQUsLBSwDBYYAknn2MVRypy0BErSeaRLyIF5PW2h/ZfnZ+LX2TP8p2p6zuutXpNXdh3bvOrozZ9aTrHTPPfd853UfatI0jWtqauL8bGcmtofJn+qazkMy12iN5mFDDNhtTYwAyGwECCiV0AR54r/JADY1SBO17q43RXigsVgPhH8L5OlWS51+8cbBCv3ivyU9HKoxRwqZMwn4kl2UYwgeUaBOoBCRqZdN+u8LuyP4B8+GCTD936UKFl0GPHm7MVFjOKg14iGJPdC8gZPbV+ZdE1gUcB2asFKpQQ5KGeA3ZR3IMgaPfhiPyMr88lywmzFRSEkAlG4dgEaB8gAoxTfg3Pk6gn0I4oMQAzEKC3OFcsiAXNCY9J5+6WbVgiwRMBkYh8iaAgbBA1ltBqCGzoxvR+80ooOpyxswwWQjqHM+hBVBaSiXcZBTBECkUIKnQIwlc+0LHDMIqguB8LUhoFNnxrflgOqq1OvuGBHBSp4CCutW3w+q5oHYohBQDuQlUMizACQwN4ZV7IGqNQypomdObtNj8TUbDmddBc+eDEx2iU1LqZVYNN7oWQaABk34zrDqeXwB0Lo9r4krEsxuLAJo3D7PY/Hiya0Yi6f4DUckF8YFY9IYnmyNVf1LguzSp1++Ra0g0xhLxQLPAdRx+6shApROYm3CmqapJMmfIM/s5L5bFe6OrQM+xeLh4idb0zqQLjliuyQO4xrg3C1Hu1xD0FjWQayspSvw3BOEhM5VAHXc9gqGST26ci5Wn/KkKhaffOU2hUEZoPJHAUi9ACLZxpgxPu+30a9E5KKvaYFs1DoiiPbNiINeBBJRxCx64p6VACIyjVrs3ygYebEQr7oGoI5b9xmgierJ3uJg4gieyVdvV12c4HqGdePFE5fG+Y0fZy0qp1WFQYOSALl4Z0wo5Qs8OQ1n86APKQ7L92Ug0UYCK/kVLSpzrxPenXhoWwDquOVlgVjuvhXrHKh8w5Ov7bFoBZgJMTLFE1s4fuPRrAXp91n4/jjIJuv5qDw0UGgwQT8SBBghm/yGLbzdCzKV/FIYSwDqiL+EAklyWilWIRxJTGbutOc+2QrRM8XjAKJNtUGky0Mr0VrLFMgn68uIPK7CgUKrIJs8MbL15FcGmUp+KgsVgNpjL+oeB1xdsoL7TE1l70o7YmLTUX0zXvHY5pV7wwxL1M25v2/MHETHNsv85mNyDddPaykVkNHguVREgD7H7AIIPrue8tVR5osI7X0vYG4zVMEdI3giUyM/dC1pA4U1qnRLPduyVjy6OUyA1E0AVq/SMZalu2rMNG2IkvJ1lv3IMcsVV7ufDXEBaaYAar/5+SFwpwNVEreuqdfvVjxneMsxiQArXQbUJpFb2KzJuSl4Eb47xm85njUJNbotFA58BFDJjz5powWFCX7dBFD7Tc+iVS+YCCEy9cZPFCYGsOW4XgbGHKz4sQ4mLCnHXPr6JCmMOFFMGWSlnoMAol3HkSp8doyj20u43m/9O2svXNuNwwLEoAUgEWPnCpQChWDy0Bt/6XEZKA5Mng+UdWPPVvHIRtEkVq8mo6Xk+5klCh5dzZNAh2KYH1L2O2qXX6AY9CUyA6C23WmBbN4TTZQqyzHe+K0nFCADSJJDEIVNNJNmk+MY5z+C7G9EtQKc3ekQ0CB8V4ayT2X6zf58BX5lC/NTgD7DvunaCteZq1Hpkqff3qtwAWn8tk+Q10jx8CWYxw3Z/BrBYWjU3XbDM15V4CSYH8luCAd8Fhz0jSV9qzlovIpcZYvzUwDeZbtVufbdC6phnH4uh95vJVRqALVd//QAxQJWoI5QLwFSunhog8S5fa6EzmqHOW/PBkkOqnBe8pmYfuceqdJ/oJEGfZQ5a8sWIufyMkf7Dc8oRJ6jU2/vzVcFUOt1v4G8p5TkVnHjt5+Ui3/f0EtA5BJ+glEtYpDPxMzv7kvX4HmYKy8l+NlCpCAVa7v+KQRTavqde7Nn50BaKQok6K7enMKtP3hCCCyIvnYS86GUxRyoYjgBcghTyMt7qhZqskEKUKQWeLDBO1k9lGOHdwxRM23XPZkDEpYDqKT1AXGUNBBoT/T1cUxyFQsAqhhmWJCXt8QurypQYub390vUk1XS4uRzLMk4CjS0ogpXgtgXrRcV9bd+73GRC3ajXdjM89+Qq+R9JUaJWV7BapdyoDtIVFHMzB8fAO9figCpjMk51vb9X8V0ALV9FwZjDYH4fqb12l8GNpTTS8t03mfYxDo2PJB9Cz4O+iPSgehBGd7vApIYG0ey7IFwt4H1eFA//996zWOhgAJIpQBPnu+ekExL2I0cyEk+gfpD54nefUgBisBnuoAwN1JZGEPbtb8Q+el3H5Zar3nUjhqiBRlv6U3FZ3PJfLAAVLPcKVddo7Be2UIQerWYKjnk1cmm12ZSBqcN743rv6hPwc786RFjXuKgdyKpkhn9dTvg3e5O/yi/YJHtrY/oQmiJDuqnLGfzg0pAPFBfjbWuOH/ZP1QX1oCwjYFcBn0eL9VrbvAJuiCS8jONQobh/TD0K1ntB4y2sQcy71w8Zfm09qYwr0ly9JuSu40ytuTQnWEZ/FRLz88zQEyHdcWPvmpmJfUjGgCe2ivhTsMqTwHkHZ8ABiw9RyyEWX2s6MZMLpklYaJCG8YRAGkjLl04F8OLBVu+80gOKMoceA5+Bfeu5aqu92haFxV4DKvu0R4zVzyQh3zOjj6qWtjMy5SezORTqoW1wjKAZv/8WB7cmGJhF2wtigLlhKsfngXKAEWBfK3aFQ9cLJjccpkGivCX/1Oh10nN813OTkIUr/nEXdaU/Qp+60YF3mVamfFL3DwmZwWXeUHBxAhxwlUPytzivXCK+pfHJU/A8+HFIqdVjMsVPd/Z8S/rfATo0JdP54GsFqQkVsQFDkVuufohqncXAITKLHz7AdxiUc+dBss2/EF/hhIr3OLv/yxVbiRZ/euvVfvguQjGo9fshRW5zjC/49PBQCnl6gUQU63lqp9Re8RlxxlAURPCt+43vIZXLbSk6lEpHo7YsU7FDy4K6xUVbdkOYx04QGn+ik+d7Sxn+7ZP/3nVtODuVrFwrdZZR7rVvz0Rb971U85jELkXrr1/IYKwf4UQZN3jXPlZ1s28Ijj64MutPFZua1UYkxct73LFOxHm3nsy3rzzXrXO4Zx7oNl/4eLFItqCNzPyrWF+52fuT1AjhKvYQG8wghmycrhubv9TTABI2HVfmXf6c1EKbzKoRPOV9+AKOnO/OlDc/+UQyaXK93Bz+jVIxpVYGKLl+Z3/ru+krMIQDuY77KAXnINOG1cdyyb8iB7oniiUQd9ZJYUwa2Om11rNvf90vvmKvTJBpS/1+gtaZ4XD710QIxPUrT/Ld3AbK9Ep3ZXu+o/MolIGLAcq+MCdZMK3FW/gR8vXvFhx7gP9WGtv845EmHijkJccfumL/xNIn0ZIhmBRA6SUXINX0zYcUL6zKmCD+m7suQ+HUHnPX/vNgRhXvnvNkyqL9Pl5Cr/rD1n2dLJRRHBDCecPpJUA8q2SyMf6b6TCgHHQXTC8CFC2jr9vuoSYVMsA8a+xSKCEWiKAfCMlwKHowLf9+0DzB4bRI0lrL/9xguRHPX7lSf7oZKMK59CCR+YPPqcGUMbxuQPPLkREjn9giwgBvzC79rK7BQIi4+L30OoFUCMHspt4oxLOf/S8GjC+FR08B59bVvRw9SceiVCyhDgAVIhb3L7TTZ4C12jnYkOdGAEdkQLGN/I7AsCpmIfX9Ve6QVgKQe6yQ09rIz8Kkz+N53oTb8XmZY4ND1RtruQlFvtzVMD5wm+lAMhYWjKGCTIOaa6Gp/y/AAMAjD5uOnbaAbgAAAAASUVORK5CYII=",
         "FileName": "logo_fbs.png",
-        "
+        "Name": "FBS Logo",
+        "Caption": "Creators of flexMLS!"
       }
     ]
   }
data/spec/fixtures/listings/tour_of_homes.json
CHANGED
@@ -4,16 +4,16 @@
   {"ResourceUri":"/listings/20060725224713296297000000/tourofhomes/20101127153422574618000000",
   "Id": "20101127153422574618000000",
   "Date": "10/01/2010",
-  "StartTime": "09:00
-  "EndTime": "12:00
+  "StartTime": "09:00-07:00",
+  "EndTime": "12:00-07:00",
   "Comments": "Wonderful home; must see!",
   "AdditionalInfo": [{"Hosted By": "Joe Smith"}, {"Host Phone": "123-456-7890"}, {"Tour Area": "North-Central"}]
   },
   {"ResourceUri":"/listings/20060725224713296297000000/tourofhomes/20101127153422174618000000",
   "Id": "20101127153422174618000000",
   "Date": "10/08/2010",
-  "StartTime": "09:00
-  "EndTime": "12:00
+  "StartTime": "09:00-07:00",
+  "EndTime": "12:00-07:00",
   "Comments": "Wonderful home; must see!",
   "AdditionalInfo": [{"Hosted By": "Joe Smith"}, {"Host Phone": "123-456-7890"}, {"Tour Area": "North-Central"}]
   }
data/spec/json_hash_test_support.rb
ADDED
@@ -0,0 +1,251 @@
+# Several workarounds to get our test setup working in multiple ruby environments credit is
+# attributed to all the projects that found solutions to these test headaches!
+
+
+module SparkApiTest
+  # directly taken from Rails 3.1's OrderedHash
+  # see https://github.com/rails/rails/blob/master/activesupport/lib/active_support/ordered_hash.rb
+
+  # The order of iteration over hashes in Ruby 1.8 is undefined. For example, you do not know the
+  # order in which +keys+ will return keys, or +each+ yield pairs. <tt>ActiveSupport::OrderedHash</tt>
+  # implements a hash that preserves insertion order, as in Ruby 1.9:
+  #
+  #   oh = ActiveSupport::OrderedHash.new
+  #   oh[:a] = 1
+  #   oh[:b] = 2
+  #   oh.keys # => [:a, :b], this order is guaranteed
+  #
+  # <tt>ActiveSupport::OrderedHash</tt> is namespaced to prevent conflicts with other implementations.
+  class OrderedHash < ::Hash #:nodoc:
+    def to_yaml_type
+      "!tag:yaml.org,2002:omap"
+    end
+
+    def encode_with(coder)
+      coder.represent_seq '!omap', map { |k,v| { k => v } }
+    end
+
+    def to_yaml(opts = {})
+      if YAML.const_defined?(:ENGINE) && !YAML::ENGINE.syck?
+        return super
+      end
+
+      YAML.quick_emit(self, opts) do |out|
+        out.seq(taguri) do |seq|
+          each do |k, v|
+            seq.add(k => v)
+          end
+        end
+      end
+    end
+
+    def nested_under_indifferent_access
+      self
+    end
+
+    # Hash is ordered in Ruby 1.9!
+    if RUBY_VERSION < '1.9'
+
+      # In MRI the Hash class is core and written in C. In particular, methods are
+      # programmed with explicit C function calls and polymorphism is not honored.
+      #
+      # For example, []= is crucial in this implementation to maintain the @keys
+      # array but hash.c invokes rb_hash_aset() originally. This prevents method
+      # reuse through inheritance and forces us to reimplement stuff.
+      #
+      # For instance, we cannot use the inherited #merge! because albeit the algorithm
+      # itself would work, our []= is not being called at all by the C code.
+
+      def initialize(*args, &block)
+        super
+        @keys = []
+      end
+
+      def self.[](*args)
+        ordered_hash = new
+
+        if (args.length == 1 && args.first.is_a?(Array))
+          args.first.each do |key_value_pair|
+            next unless (key_value_pair.is_a?(Array))
+            ordered_hash[key_value_pair[0]] = key_value_pair[1]
+          end
+
+          return ordered_hash
+        end
+
+        unless (args.size % 2 == 0)
+          raise ArgumentError.new("odd number of arguments for Hash")
+        end
+
+        args.each_with_index do |val, ind|
+          next if (ind % 2 != 0)
+          ordered_hash[val] = args[ind + 1]
+        end
+
+        ordered_hash
+      end
+
+      def initialize_copy(other)
+        super
+        # make a deep copy of keys
+        @keys = other.keys
+      end
+
+      def []=(key, value)
+        @keys << key unless has_key?(key)
+        super
+      end
+
+      def delete(key)
+        if has_key? key
+          index = @keys.index(key)
+          @keys.delete_at index
+        end
+        super
+      end
+
+      def delete_if
+        super
+        sync_keys!
+        self
+      end
+
+      def reject!
+        super
+        sync_keys!
+        self
+      end
+
+      def reject(&block)
+        dup.reject!(&block)
+      end
+
+      def keys
+        @keys.dup
+      end
+
+      def values
+        @keys.collect { |key| self[key] }
+      end
+
+      def to_hash
+        self
+      end
+
+      def to_a
+        @keys.map { |key| [ key, self[key] ] }
+      end
+
+      def each_key
+        return to_enum(:each_key) unless block_given?
+        @keys.each { |key| yield key }
+        self
+      end
+
+      def each_value
+        return to_enum(:each_value) unless block_given?
+        @keys.each { |key| yield self[key]}
+        self
+      end
+
+      def each
+        return to_enum(:each) unless block_given?
+        @keys.each {|key| yield [key, self[key]]}
+        self
+      end
+
+      alias_method :each_pair, :each
+
+      alias_method :select, :find_all
+
+      def clear
+        super
+        @keys.clear
+        self
+      end
+
+      def shift
+        k = @keys.first
+        v = delete(k)
+        [k, v]
+      end
+
+      def merge!(other_hash)
+        if block_given?
+          other_hash.each { |k, v| self[k] = key?(k) ? yield(k, self[k], v) : v }
+        else
+          other_hash.each { |k, v| self[k] = v }
+        end
+        self
+      end
+
+      alias_method :update, :merge!
+
+      def merge(other_hash, &block)
+        dup.merge!(other_hash, &block)
+      end
+
+      # When replacing with another hash, the initial order of our keys must come from the other hash -ordered or not.
+      def replace(other)
+        super
+        @keys = other.keys
+        self
+      end
+
+      def invert
+        OrderedHash[self.to_a.map!{|key_value_pair| key_value_pair.reverse}]
+      end
+
+      private
+      def sync_keys!
+        @keys.delete_if {|k| !has_key?(k)}
+      end
+    end
+  end
+end
+
+# Originally based on a fix found in Koala, a facebook client gem that has compatible licensing.
+# See: https://raw.github.com/arsduo/koala/master/spec/support/json_testing_fix.rb
+#
+# when testing across Ruby versions, we found that JSON string creation inconsistently ordered keys
+# which is a problem because our mock testing service ultimately matches strings to see if requests are mocked
+# this fix solves that problem by ensuring all hashes are created with a consistent key order every time
+module MultiJson
+
+  class << self
+    def dump_with_ordering(object)
+      # if it's a hash, recreate it with k/v pairs inserted in sorted-by-key order
+      # (for some reason, REE fails if we don't assign the ternary result as a local variable
+      # separately from calling encode_original)
+      dump_original(sort_object(object))
+    end
+
+    alias_method :dump_original, :dump
+    alias_method :dump, :dump_with_ordering
+
+    def load_with_ordering(string)
+      sort_object(load_original(string))
+    end
+
+    alias_method :load_original, :load
+    alias_method :load, :load_with_ordering
+
+    private
+
+    def sort_object(object)
+      if object.is_a?(Hash)
+        sort_hash(object)
+      elsif object.is_a?(Array)
+        object.collect {|item| item.is_a?(Hash) ? sort_hash(item) : item}
+      else
+        object
+      end
+    end
+
+    def sort_hash(unsorted_hash)
+      sorted_hash = SparkApiTest::OrderedHash.new(sorted_hash)
+      unsorted_hash.keys.sort {|a, b| a.to_s <=> b.to_s}.inject(sorted_hash) {|hash, k| hash[k] = unsorted_hash[k]; hash}
+    end
+  end
+end
+
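The purpose of this new support file: the specs match stubbed HTTP requests against exact JSON strings, and Ruby 1.8 hashes have no defined iteration order, so the same hash could serialize differently between runs. With the patch loaded, MultiJson.dump and MultiJson.load both pass through sort_object, so roughly:

    require 'multi_json'
    # Assuming json_hash_test_support has been required (spec_helper.rb does so below):
    MultiJson.dump({ "b" => 2, "a" => 1 })
    # => '{"a":1,"b":2}' -- keys are emitted in sorted order on every Ruby version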
data/spec/mock_helper.rb
CHANGED
@@ -22,7 +22,7 @@ def stub_api_delete(service_path, stub_fixture="success.json", opts={})
|
|
22
22
|
sig = $test_client.authenticator.sign_token("/#{SparkApi.version}#{service_path}", params)
|
23
23
|
s=stub_request(:delete, "#{SparkApi.endpoint}/#{SparkApi.version}#{service_path}").
|
24
24
|
with(:query => {
|
25
|
-
:ApiSig => sig
|
25
|
+
:ApiSig => sig
|
26
26
|
}.merge(params))
|
27
27
|
if(block_given?)
|
28
28
|
yield s
|
@@ -32,8 +32,8 @@ def stub_api_delete(service_path, stub_fixture="success.json", opts={})
|
|
32
32
|
log_stub(s)
|
33
33
|
end
|
34
34
|
def stub_api_post(service_path, body, stub_fixture="success.json", opts={})
|
35
|
-
body_hash =
|
36
|
-
body_str = body_hash
|
35
|
+
body_hash = MultiJson.load(fixture(body).read)
|
36
|
+
body_str = MultiJson.dump(body_hash)
|
37
37
|
params = {:ApiUser => "foobar", :AuthToken => "c401736bf3d3f754f07c04e460e09573"}.merge(opts)
|
38
38
|
sig = $test_client.authenticator.sign_token("/#{SparkApi.version}#{service_path}", params, body_str)
|
39
39
|
s=stub_request(:post, "#{SparkApi.endpoint}/#{SparkApi.version}#{service_path}").
|
@@ -50,8 +50,8 @@ def stub_api_post(service_path, body, stub_fixture="success.json", opts={})
|
|
50
50
|
log_stub(s)
|
51
51
|
end
|
52
52
|
def stub_api_put(service_path, body, stub_fixture="success.json", opts={})
|
53
|
-
body_hash =
|
54
|
-
body_str = body_hash
|
53
|
+
body_hash = MultiJson.load(fixture(body).read)
|
54
|
+
body_str = MultiJson.dump(body_hash)
|
55
55
|
params = {:ApiUser => "foobar", :AuthToken => "c401736bf3d3f754f07c04e460e09573"}.merge(opts)
|
56
56
|
sig = $test_client.authenticator.sign_token("/#{SparkApi.version}#{service_path}", params, body_str)
|
57
57
|
s=stub_request(:put, "#{SparkApi.endpoint}/#{SparkApi.version}#{service_path}").
|
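stub_api_post and stub_api_put now round-trip each fixture through MultiJson before signing, so the ApiSig token is computed over exactly the normalized, key-ordered string the client will send. A sketch of the normalization step, using the suite's fixture helper:

    body_hash = MultiJson.load(fixture('listing_carts/new.json').read)
    body_str  = MultiJson.dump(body_hash) # canonical string used for the ApiSig signature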
data/spec/spec_helper.rb
CHANGED
@@ -13,6 +13,7 @@ require path + '/spark_api'
 require 'spark_api'
 require File.expand_path('../mock_helper', __FILE__)
 require File.expand_path('../json_helper', __FILE__)
+require File.expand_path('../json_hash_test_support', __FILE__)
 
 
 FileUtils.mkdir 'log' unless File.exists? 'log'
@@ -47,3 +48,4 @@ RSpec.configure do |config|
   config.alias_example_to :on_post_it, :method => 'POST'
   config.alias_example_to :on_delete_it, :method => 'DELETE'
 end
+
data/spec/unit/spark_api/authentication/oauth2_spec.rb
CHANGED
@@ -15,7 +15,7 @@ describe SparkApi::Authentication::OAuth2 do
   it "should authenticate the api credentials" do
     stub_request(:post, provider.access_uri).
       with(:body =>
-
+        '{"client_id":"example-id","client_secret":"example-password","code":"my_code","grant_type":"authorization_code","redirect_uri":"https://exampleapp.fbsdata.com/oauth-callback"}'
       ).
       to_return(:body => fixture("oauth2/access.json"), :status=>200)
     subject.authenticate.access_token.should eq("04u7h-4cc355-70k3n")
@@ -25,7 +25,7 @@ describe SparkApi::Authentication::OAuth2 do
   it "should raise an error when api credentials are invalid" do
     s=stub_request(:post, provider.access_uri).
       with(:body =>
-
+        '{"client_id":"example-id","client_secret":"example-password","code":"my_code","grant_type":"authorization_code","redirect_uri":"https://exampleapp.fbsdata.com/oauth-callback"}'
       ).
       to_return(:body => fixture("oauth2/error.json"), :status=>400)
     expect {subject.authenticate()}.to raise_error(SparkApi::ClientError){ |e| e.status.should == 400 }
@@ -103,22 +103,24 @@ describe SparkApi::Authentication::OAuth2 do
     count = 0
     refresh_count = 0
     stub_request(:post, provider.access_uri).
-      with(:body =>
-
+      with(:body =>
+        '{"client_id":"example-id","client_secret":"example-password","code":"my_code","grant_type":"authorization_code","redirect_uri":"https://exampleapp.fbsdata.com/oauth-callback"}'
+      ).to_return do
       count += 1
       {:body => fixture("oauth2/access_with_old_refresh.json"), :status=>200}
     end
     stub_request(:post, provider.access_uri).
-      with(:body =>
-
+      with(:body =>
+        '{"client_id":"example-id","client_secret":"example-password","grant_type":"refresh_token","redirect_uri":"https://exampleapp.fbsdata.com/oauth-callback","refresh_token":"0ld-r3fr35h-70k3n"}'
+      ).to_return do
      refresh_count += 1
      {:body => fixture("oauth2/access_with_refresh.json"), :status=>200}
    end
    # Make sure the auth request goes out twice.
    # Fail the first time, but then return the correct value after reauthentication
    stub_request(:get, "https://api.sparkapi.com/#{SparkApi.version}/listings/1234").
-
-
+      to_return(:body => fixture('errors/expired.json'), :status => 401).times(1).then.
+      to_return(:body => fixture('listings/with_documents.json'))
    client.get("/listings/1234")
    count.should eq(1)
    refresh_count.should eq(1)
@@ -129,16 +131,17 @@ describe SparkApi::Authentication::OAuth2 do
   it "should reset the session and reauthenticate" do
     count = 0
     stub_request(:post, provider.access_uri).
-      with(:body =>
-
+      with(:body =>
+        '{"client_id":"example-id","client_secret":"example-password","code":"my_code","grant_type":"authorization_code","redirect_uri":"https://exampleapp.fbsdata.com/oauth-callback"}'
+      ).to_return do
      count += 1
      {:body => fixture("oauth2/access.json"), :status=>200}
    end
    # Make sure the auth request goes out twice.
    # Fail the first time, but then return the correct value after reauthentication
    stub_request(:get, "https://api.sparkapi.com/#{SparkApi.version}/listings/1234").
-
-
+      to_return(:body => fixture('errors/expired.json'), :status => 401).times(1).then.
+      to_return(:body => fixture('listings/with_documents.json'))
 
    client.get("/listings/1234")
    count.should eq(2)
@@ -234,10 +237,9 @@ describe "password authentication" do
   subject {client.authenticator }
   it "should authenticate the api credentials with username and password" do
     stub_request(:post, provider.access_uri).
-      with(:body =>
-        '{"
-      ).
-      to_return(:body => fixture("oauth2/access.json"), :status=>200)
+      with(:body =>
+        '{"client_id":"example-id","client_secret":"example-secret","grant_type":"password","password":"example-password","username":"example-user"}'
+      ).to_return(:body => fixture("oauth2/access.json"), :status=>200)
     subject.authenticate.access_token.should eq("04u7h-4cc355-70k3n")
     subject.authenticate.expires_in.should eq(60)
   end
data/spec/unit/spark_api/models/listing_cart_spec.rb
CHANGED
@@ -47,11 +47,10 @@ describe ListingCart do
     stub_api_get("/#{subject.class.element_name}", 'listing_carts/listing_cart.json')
     resource = subject.class.get.first
     stub_api_put("/#{subject.class.element_name}/#{resource.Id}", 'listing_carts/new.json', 'success.json')
+    resource.Name = "My Cart's Name"
     resource.ListingIds = ['20110112234857732941000000',
                            '20110302120238448431000000',
                            '20110510011212354751000000']
-
-    resource.Name = "My Cart's Name"
     resource.save.should be(true)
     resource.ResourceUri.should eq("/v1/listingcarts/20100912153422758914000000")
   end
data/spec/unit/spark_api/models/open_house_spec.rb
CHANGED
@@ -8,8 +8,8 @@ describe OpenHouse do
       'ResourceUri'=>"/v1/listings/20060412165917817933000000/openhouses/20101127153422574618000000",
       'Id'=>"20060412165917817933000000",
       'Date'=>"10/01/2010",
-      'StartTime'=>"09:00
-      'EndTime'=>"12:00
+      'StartTime'=>"09:00-07:00",
+      'EndTime'=>"12:00-07:00"
     )
   end
 
@@ -21,8 +21,14 @@ describe OpenHouse do
     start_time = DateTime.new(2010,10,1,9,0,0, "-0700")
     end_time = DateTime.new(2010,10,1,12,0,0, "-0700")
     subject.Date.should eq(Date.new(2010,10,1))
-
-
+    # TRYING TO MAKE THIS BACKWARDS COMPATIBLE AND NOT HAPPY ABOUT IT
+    if RUBY_VERSION < '1.9'
+      subject.StartTime.should eq(Time.parse(start_time.to_s))
+      subject.EndTime.should eq(Time.parse(end_time.to_s))
+    else
+      subject.StartTime.should eq(start_time.to_time)
+      subject.EndTime.should eq(end_time.to_time)
+    end
   end
 
   context "/listings/<listing_id>/openhouses", :support do
data/spec/unit/spark_api/models/tour_of_home_spec.rb
CHANGED
@@ -23,8 +23,14 @@ describe TourOfHome do
     start_time = DateTime.new(2010,10,1,9,0,0, "-0700")
     end_time = DateTime.new(2010,10,1,23,0,0, "-0700")
     subject.Date.should eq(Date.new(2010,10,1))
-
-
+    # TRYING TO MAKE THIS BACKWARDS COMPATIBLE AND NOT HAPPY ABOUT IT
+    if RUBY_VERSION < '1.9'
+      subject.StartTime.should eq(Time.parse(start_time.to_s))
+      subject.EndTime.should eq(Time.parse(end_time.to_s))
+    else
+      subject.StartTime.should eq(start_time.to_time)
+      subject.EndTime.should eq(end_time.to_time)
+    end
   end
 
   context "/listings/<listing_id>/tourofhomes", :support do
metadata
CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: spark_api
 version: !ruby/object:Gem::Version
-  hash:
+  hash: 17
   prerelease:
   segments:
   - 1
   - 1
-  - 0
-  version: 1.1.0
+  - 1
+  version: 1.1.1
 platform: ruby
 authors:
 - Brandon Hornseth
@@ -16,7 +16,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2012-08-
+date: 2012-08-31 00:00:00 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   version_requirements: &id001 !ruby/object:Gem::Requirement
@@ -151,12 +151,12 @@ dependencies:
     requirements:
     - - ~>
       - !ruby/object:Gem::Version
-        hash:
+        hash: 35
         segments:
         - 2
-        -
+        - 11
         - 0
-        version: 2.
+        version: 2.11.0
   requirement: *id008
   prerelease: false
   name: rspec
@@ -199,12 +199,12 @@ dependencies:
     requirements:
    - - ~>
      - !ruby/object:Gem::Version
-        hash:
+        hash: 11
        segments:
        - 1
-        -
-        -
-        version: 1.
+        - 7
+        - 0
+        version: 1.7.0
   requirement: *id011
   prerelease: false
   name: ci_reporter
@@ -444,6 +444,7 @@ files:
 - spec/spec_helper.rb
 - spec/oauth2_helper.rb
 - spec/json_helper.rb
+- spec/json_hash_test_support.rb
 - spec/mock_helper.rb
 homepage: https://github.com/sparkapi/spark_api
 licenses: []
@@ -606,4 +607,5 @@ test_files:
 - spec/spec_helper.rb
 - spec/oauth2_helper.rb
 - spec/json_helper.rb
+- spec/json_hash_test_support.rb
 - spec/mock_helper.rb