spark_api 1.3.17 → 1.3.18

This diff compares the content of publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between those versions as published.
data/VERSION CHANGED
@@ -1 +1 @@
-1.3.17
+1.3.18
data/lib/spark_api/cli.rb CHANGED
@@ -15,7 +15,7 @@ module SparkApi
   class ConsoleCLI
     OPTIONS_ENV = {
       :endpoint => "API_ENDPOINT",
-      :no_verify => "NO_VERIFY",
+      :ssl_verify => "SSL_VERIFY",
       # OAUTH2 Options
       :access_uri => "ACCESS_URI",
       :authorization_uri => "AUTHORIZATION_URI",
@@ -68,7 +68,7 @@ module SparkApi
         :api_key => ENV[OPTIONS_ENV[:api_key]],
         :api_secret => ENV[OPTIONS_ENV[:api_secret]],
         :api_user => ENV[OPTIONS_ENV[:api_user]],
-        :no_verify => ENV.fetch(OPTIONS_ENV[:no_verify], false),
+        :ssl_verify => ENV.fetch(OPTIONS_ENV[:ssl_verify], true),
         :console => ENV[OPTIONS_ENV[:console]]
       }
       cli_options = {}
@@ -133,7 +133,7 @@ module SparkApi
         opts.on("-f", "--file FILE",
           "Load configuration for yaml file.") { |arg| file_options = parse_file_options(arg) }
         opts.on("--no_verify",
-          "Disable SSL Certificate verification. This is useful for development servers.") { |arg| cli_options[:no_verify] = arg }
+          "Disable SSL Certificate verification. This is useful for development servers.") { |arg| cli_options[:ssl_verify] = !arg }
         opts.on("-d", "--debug",
           "Show detailed request logging information.") { |arg| cli_options[:debug] = arg }
         opts.on("-v", "--version",
@@ -146,7 +146,7 @@ module SparkApi
       options = env_options.merge(file_options.merge(cli_options))
       return options
     end
-
+
     def self.setup_api_auth
       " -r #{File.dirname(__FILE__) + '/../../lib/spark_api/cli/api_auth.rb'}"
     end
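In practical terms, the cli.rb change inverts the option's polarity: the console now tracks SSL verification positively as :ssl_verify (defaulting to true) instead of negatively as :no_verify (defaulting to false), and the --no_verify switch is folded into that key by negation. A minimal sketch of the flag handling, using a bare OptionParser and hypothetical variable names rather than the gem's actual setup code:

require 'optparse'

# A bare --no_verify switch yields arg == true; the handler stores the
# negation under :ssl_verify instead of setting a separate :no_verify key.
cli_options = {}
OptionParser.new do |opts|
  opts.on("--no_verify",
    "Disable SSL Certificate verification.") { |arg| cli_options[:ssl_verify] = !arg }
end.parse!(["--no_verify"])

cli_options  # => {:ssl_verify=>false}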
@@ -5,5 +5,5 @@ SparkApi.configure do |config|
   config.api_secret = ENV["API_SECRET"]
   config.api_user = ENV["API_USER"] if ENV["API_USER"]
   config.endpoint = ENV["API_ENDPOINT"] if ENV["API_ENDPOINT"]
-  config.ssl_verify = ! (ENV["NO_VERIFY"].downcase=='true') if ENV["NO_VERIFY"]
+  config.ssl_verify = ENV["SSL_VERIFY"].downcase != 'false' if ENV["SSL_VERIFY"]
 end
@@ -18,7 +18,7 @@ SparkApi.configure do |config|
   end
   config.authentication_mode = SparkApi::Authentication::OAuth2
   config.endpoint = ENV["API_ENDPOINT"] if ENV["API_ENDPOINT"]
-  config.ssl_verify = ! (ENV["NO_VERIFY"].downcase=='true') if ENV["NO_VERIFY"]
+  config.ssl_verify = ENV["SSL_VERIFY"].downcase != 'false' if ENV["SSL_VERIFY"]
 end

 # Enables saving and loading serialized oauth2 sessions for the system user.
@@ -26,4 +26,4 @@ def persist_sessions! my_alias = nil
   warn "Warning: persistent session mode saves access tokens in clear text on the filesystem."
   SparkApi.client.oauth2_provider.session_alias = my_alias unless my_alias.nil?
   SparkApi.client.oauth2_provider.persistent_sessions = true
-end
+end
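The bundled setup scripts flip the environment switch in the same direction: where verification used to be disabled by exporting NO_VERIFY=true, it now stays enabled unless SSL_VERIFY is explicitly set to "false" (case-insensitive). A hedged illustration of the new parsing, wrapped in a made-up helper rather than the gem's configure block:

# Hypothetical helper mirroring the setup-script logic above; not part of the gem.
def ssl_verify_from_env(env)
  return true unless env["SSL_VERIFY"]    # unset => keep verification on
  env["SSL_VERIFY"].downcase != 'false'   # only an explicit "false" turns it off
end

ssl_verify_from_env({})                        # => true
ssl_verify_from_env("SSL_VERIFY" => "FALSE")   # => false
ssl_verify_from_env("SSL_VERIFY" => "1")       # => true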
@@ -15,53 +15,52 @@ module SparkApi
         @rental_calendars = []
         @documents = []
         @constraints = []
-        #@tour_of_homes = []
-
+        @tour_of_homes = []
+        @open_houses = []
+
         if attributes.has_key?('StandardFields')
           pics, vids, tours, docs, ohouses, tourhomes = attributes['StandardFields'].values_at('Photos','Videos', 'VirtualTours', 'Documents', 'OpenHouses', 'TourOfHomes')
         end
-
+
         if attributes.has_key?('RentalCalendar')
           rentalcalendars = attributes['RentalCalendar']
         end
-
+
         if pics != nil
-          pics.collect { |pic| @photos.push(Photo.new(pic)) }
+          setup_attribute(@photos, pics, Photo)
           attributes['StandardFields'].delete('Photos')
         end
-
+
         if vids != nil
-          vids.collect { |vid| @videos.push(Video.new(vid)) }
+          setup_attribute(@videos, vids, Video)
           attributes['StandardFields'].delete('Videos')
         end

         if tours != nil
-          tours.collect { |tour| @virtual_tours.push(VirtualTour.new(tour)) }
+          setup_attribute(@virtual_tours, tours, VirtualTour)
           attributes['StandardFields'].delete('VirtualTours')
         end

         if docs != nil
-          docs.collect { |doc| @documents.push(Document.new(doc)) }
+          setup_attribute(@documents, docs, Document)
           attributes['StandardFields'].delete('Documents')
         end
-
+
         if ohouses != nil
-          @open_houses = []
-          ohouses.collect { |ohouse| @open_houses.push(OpenHouse.new(ohouse)) }
+          setup_attribute(@open_houses, ohouses, OpenHouse)
           attributes['StandardFields'].delete('OpenHouses')
         end
-
+
         if tourhomes != nil
-          @tour_of_homes = []
-          tourhomes.collect { |tourhome| @tour_of_homes.push(TourOfHome.new(tourhome)) }
+          setup_attribute(@tour_of_homes, tourhomes, TourOfHome)
           attributes['StandardFields'].delete('TourOfHomes')
         end
-
+
         if rentalcalendars != nil
-          rentalcalendars.collect { |rentalcalendar| @rental_calendars.push(RentalCalendar.new(rentalcalendar)) }
+          setup_attribute(@rental_calendars, rentalcalendars, RentalCalendar)
           attributes.delete('RentalCalendar')
-        end
-
+        end
+
         super(attributes)
       end

@@ -106,7 +105,7 @@ module SparkApi
         @open_houses ||= OpenHouse.find_by_listing_key(self.Id, arguments)
         return @open_houses unless @open_houses.nil?
       end
-
+
       def my_notes
         Note.build_subclass.tap do |note|
           note.prefix = "/listings/#{self.ListingKey}"
@@ -161,6 +160,7 @@ module SparkApi
         end
         false
       end
+
       def save!(arguments={})
         writable_changed_keys = changed & WRITEABLE_FIELDS
         if writable_changed_keys.empty?
@@ -200,7 +200,7 @@ module SparkApi
         end
         editable
       end
-
+
       def ExpirationDate
         attributes["ExpirationDate"]
       end
@@ -215,7 +215,7 @@ module SparkApi
          attributes['StandardFields'].include?(method_symbol.to_s) rescue false
        end
      end
-
+
      private

      # TODO trim this down so we're only overriding the StandardFields access
@@ -236,7 +236,7 @@ module SparkApi
        super # GTFO
      end
    end
-
+
    def build_hash(keys)
      hash = {}
      keys.each do |key|
@@ -244,7 +244,15 @@ module SparkApi
      end
      hash
    end
-
+
+    # Determine if passed a model or hash and push instance of Klass onto attributes array
+    def setup_attribute(attributes, collection, klass)
+      collection.collect do |c|
+        attribute = (c.instance_of? klass) ? c : klass.new(c)
+        attributes.push(attribute)
+      end
+    end
+
    end
  end
 end
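The Listing constructor refactor replaces a run of near-identical collect blocks with the single setup_attribute helper added above, which also tolerates collections that already contain model instances rather than raw hashes. A hypothetical usage sketch (field values invented, and assuming the listing exposes its photo collection through the usual photos reader):

require 'spark_api'

photos = [
  { 'Id' => 'photo-1', 'Name' => 'Front' },        # plain hash, wrapped via Photo.new
  SparkApi::Models::Photo.new('Id' => 'photo-2')   # already a Photo, pushed through as-is
]
listing = SparkApi::Models::Listing.new('StandardFields' => { 'Photos' => photos })
listing.photos.map(&:class)  # => both entries come back as SparkApi::Models::Photo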
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: spark_api
 version: !ruby/object:Gem::Version
-  hash: 57
+  hash: 63
   prerelease:
   segments:
   - 1
   - 3
-  - 17
-  version: 1.3.17
+  - 18
+  version: 1.3.18
 platform: ruby
 authors:
 - Brandon Hornseth
@@ -16,7 +16,7 @@ autorequire:
 bindir: bin
 cert_chain: []

-date: 2014-11-07 00:00:00 Z
+date: 2014-11-14 00:00:00 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   type: :runtime