spark_api 1.1.2 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. data/History.txt +14 -0
  2. data/README.md +42 -233
  3. data/VERSION +1 -1
  4. data/lib/spark_api.rb +1 -0
  5. data/lib/spark_api/authentication/oauth2.rb +39 -9
  6. data/lib/spark_api/authentication/oauth2_impl/cli_provider.rb +96 -0
  7. data/lib/spark_api/authentication/oauth2_impl/faraday_middleware.rb +28 -0
  8. data/lib/spark_api/authentication/oauth2_impl/grant_type_base.rb +7 -2
  9. data/lib/spark_api/authentication/oauth2_impl/single_session_provider.rb +27 -0
  10. data/lib/spark_api/cli.rb +29 -10
  11. data/lib/spark_api/cli/api_auth.rb +1 -0
  12. data/lib/spark_api/cli/oauth2.rb +23 -8
  13. data/lib/spark_api/cli/setup.rb +31 -0
  14. data/lib/spark_api/configuration.rb +10 -2
  15. data/lib/spark_api/configuration/yaml.rb +6 -1
  16. data/lib/spark_api/connection.rb +1 -1
  17. data/lib/spark_api/errors.rb +48 -0
  18. data/lib/spark_api/models.rb +3 -0
  19. data/lib/spark_api/models/account.rb +9 -1
  20. data/lib/spark_api/models/base.rb +24 -19
  21. data/lib/spark_api/models/concerns.rb +7 -0
  22. data/lib/spark_api/models/concerns/destroyable.rb +32 -0
  23. data/lib/spark_api/models/concerns/savable.rb +66 -0
  24. data/lib/spark_api/models/contact.rb +6 -25
  25. data/lib/spark_api/models/dirty.rb +57 -0
  26. data/lib/spark_api/models/finders.rb +0 -4
  27. data/lib/spark_api/models/saved_search.rb +10 -0
  28. data/lib/spark_api/models/subresource.rb +5 -1
  29. data/lib/spark_api/models/subscription.rb +52 -0
  30. data/lib/spark_api/request.rb +17 -4
  31. data/lib/spark_api/response.rb +0 -37
  32. data/script/combined_flow_example.rb +3 -3
  33. data/script/oauth2_example.rb +3 -3
  34. data/spec/fixtures/base.json +3 -1
  35. data/spec/fixtures/contacts/new.json +2 -3
  36. data/spec/fixtures/contacts/new_empty.json +2 -3
  37. data/spec/fixtures/contacts/new_notify.json +1 -1
  38. data/spec/fixtures/{listings/saved_search.json → saved_searches/get.json} +1 -1
  39. data/spec/fixtures/saved_searches/new.json +8 -0
  40. data/spec/fixtures/saved_searches/post.json +12 -0
  41. data/spec/fixtures/saved_searches/update.json +6 -0
  42. data/spec/fixtures/subscriptions/get.json +19 -0
  43. data/spec/fixtures/subscriptions/new.json +13 -0
  44. data/spec/fixtures/subscriptions/post.json +10 -0
  45. data/spec/fixtures/subscriptions/put.json +12 -0
  46. data/spec/fixtures/subscriptions/subscribe.json +5 -0
  47. data/spec/fixtures/subscriptions/update.json +6 -0
  48. data/spec/mock_helper.rb +14 -6
  49. data/spec/oauth2_helper.rb +2 -0
  50. data/spec/spec_helper.rb +4 -7
  51. data/spec/unit/spark_api/authentication/api_auth_spec.rb +0 -1
  52. data/spec/unit/spark_api/authentication/oauth2_impl/faraday_middleware_spec.rb +32 -0
  53. data/spec/unit/spark_api/authentication/oauth2_impl/single_session_provider_spec.rb +9 -0
  54. data/spec/unit/spark_api/authentication/oauth2_spec.rb +29 -3
  55. data/spec/unit/spark_api/authentication_spec.rb +4 -10
  56. data/spec/unit/spark_api/configuration/yaml_spec.rb +4 -3
  57. data/spec/unit/spark_api/configuration_spec.rb +22 -8
  58. data/spec/unit/spark_api/models/account_spec.rb +5 -0
  59. data/spec/unit/spark_api/models/base_spec.rb +27 -0
  60. data/spec/unit/spark_api/models/concerns/destroyable_spec.rb +28 -0
  61. data/spec/unit/spark_api/models/concerns/savable_spec.rb +61 -0
  62. data/spec/unit/spark_api/models/contact_spec.rb +5 -5
  63. data/spec/unit/spark_api/models/dirty_spec.rb +46 -0
  64. data/spec/unit/spark_api/models/finders_spec.rb +0 -7
  65. data/spec/unit/spark_api/models/saved_search_spec.rb +34 -3
  66. data/spec/unit/spark_api/models/shared_listing_spec.rb +1 -1
  67. data/spec/unit/spark_api/models/subscription_spec.rb +106 -0
  68. data/spec/unit/spark_api/multi_client_spec.rb +14 -4
  69. data/spec/unit/spark_api/paginate_spec.rb +0 -1
  70. data/spec/unit/spark_api/request_spec.rb +10 -0
  71. data/spec/unit/spark_api_spec.rb +0 -3
  72. metadata +127 -45
  73. data/lib/spark_api/authentication/oauth2_impl/password_provider.rb +0 -24
@@ -35,6 +35,34 @@ module SparkApi
 
     end
     Faraday.register_middleware :response, :oauth2_impl => FaradayMiddleware
+
+    #==OAuth2 Faraday response middleware
+    # HTTP Response after filter to package oauth2 responses and bubble up basic api errors.
+    class SparkbarFaradayMiddleware < Faraday::Response::Middleware
+
+      def initialize(app)
+        super(app)
+      end
+
+      def on_complete(env)
+        body = MultiJson.decode(env[:body])
+        SparkApi.logger.debug("[sparkbar] Response Body: #{body.inspect}")
+        unless body.is_a?(Hash)
+          raise InvalidResponse, "The server response could not be understood"
+        end
+        case env[:status]
+        when 200..299
+          SparkApi.logger.debug("[sparkbar] Success!")
+          if body.include?("token")
+            env[:body] = body
+            return
+          end
+        end
+        raise ClientError, {:message => "Unable to process sparkbar token #{body.inspect}", :code =>0, :status => env[:status]}
+      end
+
+    end
+    Faraday.register_middleware :response, :sparkbar_impl => SparkbarFaradayMiddleware
 
   end
 end
@@ -45,6 +45,11 @@ module SparkApi
         response.expires_in = provider.session_timeout if response.expires_in.nil?
         SparkApi.logger.debug("[oauth2] New session created #{response}")
         response
+      rescue Faraday::Error::ConnectionFailed => e
+        if @client.ssl_verify && e.message =~ /certificate verify failed/
+          SparkApi.logger.error(SparkApi::Errors.ssl_verification_error)
+        end
+        raise e
       end
 
       def needs_refreshing?
@@ -66,9 +71,9 @@ module SparkApi
         opts = {
           :headers => @client.headers
         }
-        opts[:ssl] = {:verify => false }
+
+        opts[:ssl] = {:verify => false } unless @client.ssl_verify
         opts[:url] = endpoint
-        Faraday.register_middleware :response, :faraday_middleware => FaradayMiddleware
         conn = Faraday::Connection.new(opts) do |conn|
           conn.response :oauth2_impl
           conn.adapter Faraday.default_adapter
@@ -0,0 +1,27 @@
+module SparkApi
+  module Authentication
+
+    class SingleSessionProvider < BaseOAuth2Provider
+
+      def initialize(credentials)
+        @access_token = credentials.delete(:access_token)
+        super(credentials)
+      end
+
+      def load_session
+        @session ||= SparkApi::Authentication::OAuthSession.new({
+          :access_token => @access_token
+        })
+      end
+
+      def save_session session
+        @session = session
+      end
+
+      def destroy_session
+        @session = nil
+      end
+
+    end
+  end
+end
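The new SingleSessionProvider lets a client reuse an OAuth2 access token that was obtained elsewhere instead of running a grant flow. A minimal wiring sketch, assuming a token is already at hand (the token string and request path below are placeholders, not values from this release):

  SparkApi.configure do |config|
    config.authentication_mode = SparkApi::Authentication::OAuth2
    config.oauth2_provider = SparkApi::Authentication::SingleSessionProvider.new(
      :access_token => "existing-access-token"   # placeholder token value
    )
  end

  SparkApi.client.get("/my/account")   # later calls reuse the supplied token via load_session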
@@ -14,9 +14,13 @@ module SparkApi
   module CLI
     class ConsoleCLI
       OPTIONS_ENV = {
-        :endpoint => "API_ENDPOINT",
+        :endpoint => "API_ENDPOINT",
+        :no_verify => "NO_VERIFY",
         # OAUTH2 Options
         :access_uri => "ACCESS_URI",
+        :authorization_uri => "AUTHORIZATION_URI",
+        :redirect_uri => "REDIRECT_URI",
+        :code => "CODE",
         :username=> "USERNAME",
         :password=> "PASSWORD",
         :client_id=> "CLIENT_ID",
@@ -53,6 +57,9 @@ module SparkApi
         :endpoint => ENV[OPTIONS_ENV[:endpoint]],
         # OAUTH2 Options
         :access_uri => ENV[OPTIONS_ENV[:access_uri]],
+        :authorization_uri => ENV[OPTIONS_ENV[:authorization_uri]],
+        :redirect_uri => ENV[OPTIONS_ENV[:redirect_uri]],
+        :code => ENV[OPTIONS_ENV[:code]],
         :username=> ENV[OPTIONS_ENV[:username]],
         :password=> ENV[OPTIONS_ENV[:password]],
         :client_id=> ENV[OPTIONS_ENV[:client_id]],
@@ -61,6 +68,7 @@ module SparkApi
         :api_key => ENV[OPTIONS_ENV[:api_key]],
         :api_secret => ENV[OPTIONS_ENV[:api_secret]],
         :api_user => ENV[OPTIONS_ENV[:api_user]],
+        :no_verify => ENV.fetch(OPTIONS_ENV[:no_verify], false),
         :console => ENV[OPTIONS_ENV[:console]]
       }
       cli_options = {}
@@ -77,15 +85,15 @@ module SparkApi
           Options are:
         BANNER
         opts.separator ""
-        opts.on("-o","--oauth2",
-                "Run the API using OAuth2 credentials. The client defaults to using the Spark API authentication mode for access. ",
-                "See http://sparkplatform.com/docs/authentication/authentication for more information on authentication types.",
-                "Default: false") { |arg| cli_options[:oauth2] = arg }
         opts.on("-e","--endpoint ENDPOINT",
                 "URI of the API.",
-                "Default: ENV['#{OPTIONS_ENV[:endpoint]}']") { |arg| cli_options[:endpoint] = arg }
+                "Default: ENV['#{OPTIONS_ENV[:endpoint]}'] or #{SparkApi::Configuration::DEFAULT_ENDPOINT}") { |arg| cli_options[:endpoint] = arg }
 
         # OAUTH2
+        opts.on("-o","--oauth2",
+                "Run the API using OAuth2 credentials. The client defaults to using the Spark API authentication mode for access. ",
+                "See http://sparkplatform.com/docs/authentication/authentication for more information on authentication types.",
+                "Default: false") { |arg| cli_options[:oauth2] = arg }
         opts.on("--client_id CLIENT_ID",
                 "OAuth2 client id",
                 "Default: ENV['#{OPTIONS_ENV[:client_id]}']") { |arg| cli_options[:client_id] = arg }
@@ -99,9 +107,17 @@ module SparkApi
                 "OAuth2 password",
                 "Default: ENV['#{OPTIONS_ENV[:password]}']") { |arg| cli_options[:password] = arg }
         opts.on("--access_uri ACCESS_URI",
-                "OAuth2 path for granting access to the application",
-                "Default: ENV['#{OPTIONS_ENV[:access_uri]}']") { |arg| cli_options[:access_uri] = arg }
-
+                "OAuth2 path for granting access to the application using one of the supported grant types.",
+                "Default: ENV['#{OPTIONS_ENV[:access_uri]}'] or #{SparkApi::Configuration::DEFAULT_ACCESS_URI}") { |arg| cli_options[:access_uri] = arg }
+        opts.on("--redirect_uri REDIRECT_URI",
+                "OAuth2 application redirect for the client id. This needs to match whatever value is saved for the application's client_id",
+                "Default: ENV['#{OPTIONS_ENV[:redirect_uri]}'] or #{SparkApi::Configuration::DEFAULT_REDIRECT_URI}") { |arg| cli_options[:redirect_uri] = arg }
+        opts.on("--authorization_uri AUTHORIZATION_URI",
+                "OAuth2 authorization endpoint for a user. This is where the user should go to sign in and authorize client id.",
+                "Default: ENV['#{OPTIONS_ENV[:authorization_uri]}'] or #{SparkApi::Configuration::DEFAULT_AUTH_ENDPOINT}") { |arg| cli_options[:authorization_uri] = arg }
+        opts.on("--code CODE",
+                "OAuth2 authorization code used for granting application access to the API for a user") { |arg| cli_options[:code] = arg }
+
         # API AUTH
         opts.on("--api_key API_KEY",
                 "Authentication key for running the api using the default api authentication",
@@ -112,9 +128,12 @@ module SparkApi
         opts.on("--api_user API_USER",
                 "ID of the Spark user to run the client as.",
                 "Default: ENV['#{OPTIONS_ENV[:api_user]}']") { |arg| cli_options[:api_user] = arg }
-
+
+        # General
         opts.on("-f", "--file FILE",
                 "Load configuration for yaml file.") { |arg| file_options = parse_file_options(arg) }
+        opts.on("--no_verify",
+                "Disable SSL Certificate verification. This is useful for development servers.") { |arg| cli_options[:no_verify] = arg }
         opts.on("-d", "--debug",
                 "Show detailed request logging information.") { |arg| cli_options[:debug] = arg }
         opts.on("-v", "--version",
@@ -5,4 +5,5 @@ SparkApi.configure do |config|
   config.api_secret = ENV["API_SECRET"]
   config.api_user = ENV["API_USER"] if ENV["API_USER"]
   config.endpoint = ENV["API_ENDPOINT"] if ENV["API_ENDPOINT"]
+  config.ssl_verify = ! (ENV["NO_VERIFY"].downcase=='true') if ENV["NO_VERIFY"]
 end
@@ -1,14 +1,29 @@
 require File.dirname(__FILE__) + "/../cli/setup"
 
+
 SparkApi.configure do |config|
-  config.oauth2_provider = SparkApi::Authentication::OAuth2Impl::PasswordProvider.new(
-    :authorization_uri=> ENV["AUTH_URI"],
-    :access_uri => ENV["ACCESS_URI"],
-    :username=> ENV["USERNAME"],
-    :password=> ENV["PASSWORD"],
-    :client_id=> ENV["CLIENT_ID"],
-    :client_secret=> ENV["CLIENT_SECRET"]
-  )
+  oauth = {
+    :authorization_uri=> ENV.fetch("AUTH_URI", SparkApi::Configuration::DEFAULT_AUTHORIZATION_URI),
+    :access_uri => ENV.fetch("ACCESS_URI", SparkApi::Configuration::DEFAULT_ACCESS_URI),
+    :redirect_uri => ENV.fetch("REDIRECT_URI", SparkApi::Configuration::DEFAULT_REDIRECT_URI),
+    :client_id=> ENV["CLIENT_ID"],
+    :client_secret=> ENV["CLIENT_SECRET"]
+  }
+  oauth[:username] = ENV["USERNAME"] if ENV.include?("USERNAME")
+  oauth[:password] = ENV["PASSWORD"] if ENV.include?("PASSWORD")
+  config.oauth2_provider = SparkApi::Authentication::OAuth2Impl::CLIProvider.new(oauth)
+  unless (oauth.include?(:username) && oauth.include?(:password))
+    config.oauth2_provider.grant_type = :authorization_code
+    config.oauth2_provider.code = ENV["CODE"] if ENV.include?("CODE")
+  end
   config.authentication_mode = SparkApi::Authentication::OAuth2
   config.endpoint = ENV["API_ENDPOINT"] if ENV["API_ENDPOINT"]
+  config.ssl_verify = ! (ENV["NO_VERIFY"].downcase=='true') if ENV["NO_VERIFY"]
 end
+
+# Enables saving and loading serialized oauth2 sessions for the system user.
+def persist_sessions! my_alias = nil
+  warn "Warning: persistent session mode saves access tokens in clear text on the filesystem."
+  SparkApi.client.oauth2_provider.session_alias = my_alias unless my_alias.nil?
+  SparkApi.client.oauth2_provider.persistent_sessions = true
+end
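With this script loaded in a console, persistent session mode could then be switched on from the prompt; a short sketch (the alias name is arbitrary):

  persist_sessions! "development"   # omit the alias to keep the provider's default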
@@ -45,3 +45,34 @@ include SparkApi::Models
 def c
   SparkApi.client
 end
+
+# Straight up HTTP functions y'all!!!
+
+def get(path, options={})
+  c.get(path, options)
+end
+
+def post(path, body = nil, options={})
+  c.post(path, body, options)
+end
+
+def put(path, body = nil, options={})
+  c.put(path, body, options)
+end
+
+def delete(path, options={})
+  c.delete(path, options)
+end
+
+# Handy session persistence
+def save_oauth2_session! session_alias = "default"
+
+rescue => e
+  puts "Unable to save the oauth2 session: #{e.message}"
+end
+
+def load_oauth2_session session_alias = "default"
+  c.oauth2_provider.session = ""
+rescue => e
+  puts "Unable to find a saved oauth2 session: #{e.message}"
+end
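These console helpers simply delegate to the configured client, so raw API calls can be issued straight from the CLI prompt. An illustrative session (the paths, parameters, and resource id are examples only, not values from this release):

  get "/my/account"
  get "/contacts", :_limit => 10                  # _limit is a standard pagination parameter
  delete "/contacts/20110101000000000000000000"   # example contact id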
@@ -5,13 +5,16 @@ module SparkApi
 
     # valid configuration options
     VALID_OPTION_KEYS = [:api_key, :api_secret, :api_user, :endpoint,
-      :user_agent, :version, :ssl, :oauth2_provider, :authentication_mode,
+      :user_agent, :version, :ssl, :ssl_verify, :oauth2_provider, :authentication_mode,
       :auth_endpoint, :callback].freeze
     OAUTH2_KEYS = [:authorization_uri, :access_uri, :client_id, :client_secret,
       # Requirements for authorization_code grant type
      :redirect_uri,
       # Requirements for password grant type
-      :username, :password
+      :username, :password,
+      # Requirements for single session keys
+      :access_token,
+      :sparkbar_uri
     ]
 
     require File.expand_path('../configuration/yaml', __FILE__)
@@ -23,10 +26,14 @@ module SparkApi
     DEFAULT_API_SECRET = nil
     DEFAULT_API_USER = nil
     DEFAULT_ENDPOINT = 'https://api.sparkapi.com'
+    DEFAULT_REDIRECT_URI = "https://sparkplatform.com/oauth2/callback"
     DEFAULT_AUTH_ENDPOINT = 'https://sparkplatform.com/openid' # Ignored for Spark API Auth
+    DEFAULT_AUTHORIZATION_URI = 'https://sparkplatform.com/oauth2'
     DEFAULT_VERSION = 'v1'
+    DEFAULT_ACCESS_URI = "#{DEFAULT_ENDPOINT}/#{DEFAULT_VERSION}/oauth2/grant"
     DEFAULT_USER_AGENT = "Spark API Ruby Gem #{VERSION}"
     DEFAULT_SSL = true
+    DEFAULT_SSL_VERIFY = true
     DEFAULT_OAUTH2 = nil
 
     X_SPARK_API_USER_AGENT = "X-SparkApi-User-Agent"
@@ -57,6 +64,7 @@ module SparkApi
       self.oauth2_provider = DEFAULT_OAUTH2
       self.user_agent = DEFAULT_USER_AGENT
       self.ssl = DEFAULT_SSL
+      self.ssl_verify = DEFAULT_SSL_VERIFY
      self.version = DEFAULT_VERSION
      self
    end
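The new ssl_verify option keeps certificate verification on by default while allowing it to be relaxed for trusted non-production endpoints. A configuration sketch (the key, secret, and endpoint values are examples only):

  SparkApi.configure do |config|
    config.api_key    = "my_api_key"
    config.api_secret = "my_api_secret"
    config.endpoint   = "https://developers.sparkapi.com"   # example non-production endpoint
    config.ssl_verify = false                               # skip certificate verification
  end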
@@ -5,7 +5,7 @@ module SparkApi
   module Configuration
     class YamlConfig
       KEY_CONFIGURATIONS = VALID_OPTION_KEYS + [:oauth2] + OAUTH2_KEYS
-      DEFAULT_OAUTH2_PROVIDER = "SparkApi::Authentication::OAuth2Impl::PasswordProvider"
+      DEFAULT_OAUTH2_PROVIDER = "SparkApi::Authentication::OAuth2Impl::CLIProvider"
       attr_accessor *KEY_CONFIGURATIONS
       attr_reader :client_keys, :oauth2_keys, :provider
 
@@ -26,10 +26,15 @@ module SparkApi
       def oauth2?
         return oauth2 == true
       end
+
+      def ssl_verify?
+        return ssl_verify == true
+      end
 
       def name
         @name
       end
+
       def api_env
         if env.include? "SPARK_API_ENV"
           env["SPARK_API_ENV"]
@@ -12,7 +12,7 @@ module SparkApi
         :headers => headers
       }
       if(force_ssl || self.ssl)
-        opts[:ssl] = {:verify => false }
+        opts[:ssl] = {:verify => false } unless self.ssl_verify
         opts[:url] = @endpoint.sub /^http:/, "https:"
       else
         opts[:url] = @endpoint.sub /^https:/, "http:"
@@ -0,0 +1,48 @@
+module SparkApi
+
+  # All known response codes listed in the API
+  module ResponseCodes
+    NOT_FOUND = 404
+    METHOD_NOT_ALLOWED = 405
+    INVALID_KEY = 1000
+    DISABLED_KEY = 1010
+    API_USER_REQUIRED = 1015
+    SESSION_TOKEN_EXPIRED = 1020
+    SSL_REQUIRED = 1030
+    INVALID_JSON = 1035
+    INVALID_FIELD = 1040
+    MISSING_PARAMETER = 1050
+    INVALID_PARAMETER = 1053
+    CONFLICTING_DATA = 1055
+    NOT_AVAILABLE= 1500
+    RATE_LIMIT_EXCEEDED = 1550
+  end
+
+  # Errors built from API responses
+  class InvalidResponse < StandardError; end
+  class ClientError < StandardError
+    attr_reader :code, :status, :details
+    def initialize (options = {})
+      # Support the standard initializer for errors
+      opts = options.is_a?(Hash) ? options : {:message => options.to_s}
+      @code = opts[:code]
+      @status = opts[:status]
+      @details = opts[:details]
+      super(opts[:message])
+    end
+
+  end
+  class NotFound < ClientError; end
+  class PermissionDenied < ClientError; end
+  class NotAllowed < ClientError; end
+  class BadResourceRequest < ClientError; end
+
+  # =Errors
+  # Error messages and other error handling
+  module Errors
+    def self.ssl_verification_error
+      "SSL verification problem: if connecting to a trusted but non production API endpoint, " +
+      "set 'ssl_verify' to false in the configuration or add '--no_verify' to the CLI command."
+    end
+  end
+end
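The error classes above carry the API's error code and HTTP status alongside the message, so callers can rescue and branch on them. A hedged handling sketch (the listing id is illustrative, and exactly which subclass is raised depends on the request layer):

  begin
    SparkApi.client.get("/listings/20060725224713296297000000")
  rescue SparkApi::NotFound => e
    puts "No such listing: #{e.message}"
  rescue SparkApi::ClientError => e
    puts "API error #{e.code} (HTTP #{e.status}): #{e.message}"
  end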
@@ -1,7 +1,9 @@
+require 'spark_api/models/dirty'
 require 'spark_api/models/base'
 require 'spark_api/models/constraint'
 require 'spark_api/models/finders'
 require 'spark_api/models/subresource'
+require 'spark_api/models/concerns'
 
 require 'spark_api/models/account'
 require 'spark_api/models/connect_prefs'
@@ -26,6 +28,7 @@ require 'spark_api/models/tour_of_home'
 require 'spark_api/models/video'
 require 'spark_api/models/virtual_tour'
 require 'spark_api/models/rental_calendar'
+require 'spark_api/models/subscription'
 
 module SparkApi
   module Models
@@ -39,7 +39,15 @@ module SparkApi
       def primary_img(typ)
         if @images.is_a?(Array)
           matches = @images.select {|i| i.Type == typ}
-          matches.sort {|a,b| a.Name <=> b.Name }.first
+          matches.sort do |a,b|
+            if a.Name.nil? && !b.Name.nil?
+              1
+            elsif b.Name.nil? && !a.Name.nil?
+              -1
+            else
+              a.Name.to_s <=> b.Name.to_s
+            end
+          end.first
         else
           nil
         end
@@ -5,10 +5,10 @@ module SparkApi
     # active model type niceties.
     class Base
       extend Paginate
+      include Dirty
 
       attr_accessor :attributes, :errors
-      attr_reader :changed
-
+
       # Name of the resource as related to the path name
       def self.element_name
         # TODO I'd love to pull in active model at this point to provide default naming
@@ -18,7 +18,7 @@ module SparkApi
       def self.element_name=(name)
         @element_name = name
       end
-
+
       # Resource path prefix, prepended to the url
       def self.prefix
         @prefix ||= "/"
@@ -29,7 +29,7 @@ module SparkApi
       def self.path
         "#{prefix}#{element_name}"
       end
-
+
       def self.connection
         SparkApi.client
       end
@@ -40,7 +40,6 @@ module SparkApi
       def initialize(attributes={})
         @attributes = {}
         @errors = []
-        @changed = []
         load(attributes)
       end
 
@@ -49,7 +48,7 @@ module SparkApi
           @attributes[key.to_s] = val
         end
       end
-
+
       def self.get(options={})
         collect(connection.get(path, options))
       end
@@ -61,28 +60,28 @@ module SparkApi
       def self.count(options={})
         connection.get(path, options.merge({:_pagination=>"count"}))
       end
-
+
       def method_missing(method_symbol, *arguments)
         method_name = method_symbol.to_s
 
-        if method_name =~ /(=|\?)$/
+        if method_name =~ /(=|\?|_will_change!)$/
           case $1
           when "="
             write_attribute($`, arguments.first)
             # TODO figure out a nice way to present setters for the standard fields
           when "?"
-            if attributes.include?($`)
-              attributes[$`] ? true : false
-            else
-              raise NoMethodError
-            end
+            raise NoMethodError unless attributes.include?($`)
+            attributes[$`] ? true : false
+          when "_will_change!"
+            raise NoMethodError unless attributes.include?($`)
+            attribute_will_change!($`)
           end
         else
           return attributes[method_name] if attributes.include?(method_name)
           super # GTFO
         end
       end
-
+
       def respond_to?(method_symbol, include_private=false)
         if super
           return true
@@ -93,26 +92,32 @@ module SparkApi
             true
           elsif method_name =~ /(\?)$/
            attributes.include?($`)
+          elsif method_name =~ /(\w*)_will_change!$/
+            attributes.include?($1)
           else
             attributes.include?(method_name)
           end
 
         end
       end
-
+
       def parse_id(uri)
         uri[/\/.*\/(.+)$/, 1]
       end
-
+
+      def persisted?
+        !(@attributes['Id'].nil? && @attributes['ResourceUri'].nil?)
+      end
+
       protected
-
+
       def write_attribute(attribute, value)
         unless attributes[attribute] == value
+          attribute_will_change!(attribute)
           attributes[attribute] = value
-          @changed << attribute unless @changed.include?(attribute)
         end
       end
-
+
     end
   end
 end
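Taken together, the Base changes swap the hand-rolled @changed array for the Dirty concern: write_attribute flags changes through attribute_will_change!, explicit Foo_will_change! calls are routed through method_missing, and persisted? reports whether a record already has an Id or ResourceUri. A rough behavioral sketch, assuming a Contact model built on Base (the attribute value is illustrative):

  contact = SparkApi::Models::Contact.new
  contact.persisted?                        # => false, no Id or ResourceUri yet
  contact.DisplayName = "Example Contact"   # write_attribute marks DisplayName as changed
  contact.DisplayName_will_change!          # explicit flag, handled by method_missing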