spark_api 1.1.2 → 1.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. data/History.txt +14 -0
  2. data/README.md +42 -233
  3. data/VERSION +1 -1
  4. data/lib/spark_api.rb +1 -0
  5. data/lib/spark_api/authentication/oauth2.rb +39 -9
  6. data/lib/spark_api/authentication/oauth2_impl/cli_provider.rb +96 -0
  7. data/lib/spark_api/authentication/oauth2_impl/faraday_middleware.rb +28 -0
  8. data/lib/spark_api/authentication/oauth2_impl/grant_type_base.rb +7 -2
  9. data/lib/spark_api/authentication/oauth2_impl/single_session_provider.rb +27 -0
  10. data/lib/spark_api/cli.rb +29 -10
  11. data/lib/spark_api/cli/api_auth.rb +1 -0
  12. data/lib/spark_api/cli/oauth2.rb +23 -8
  13. data/lib/spark_api/cli/setup.rb +31 -0
  14. data/lib/spark_api/configuration.rb +10 -2
  15. data/lib/spark_api/configuration/yaml.rb +6 -1
  16. data/lib/spark_api/connection.rb +1 -1
  17. data/lib/spark_api/errors.rb +48 -0
  18. data/lib/spark_api/models.rb +3 -0
  19. data/lib/spark_api/models/account.rb +9 -1
  20. data/lib/spark_api/models/base.rb +24 -19
  21. data/lib/spark_api/models/concerns.rb +7 -0
  22. data/lib/spark_api/models/concerns/destroyable.rb +32 -0
  23. data/lib/spark_api/models/concerns/savable.rb +66 -0
  24. data/lib/spark_api/models/contact.rb +6 -25
  25. data/lib/spark_api/models/dirty.rb +57 -0
  26. data/lib/spark_api/models/finders.rb +0 -4
  27. data/lib/spark_api/models/saved_search.rb +10 -0
  28. data/lib/spark_api/models/subresource.rb +5 -1
  29. data/lib/spark_api/models/subscription.rb +52 -0
  30. data/lib/spark_api/request.rb +17 -4
  31. data/lib/spark_api/response.rb +0 -37
  32. data/script/combined_flow_example.rb +3 -3
  33. data/script/oauth2_example.rb +3 -3
  34. data/spec/fixtures/base.json +3 -1
  35. data/spec/fixtures/contacts/new.json +2 -3
  36. data/spec/fixtures/contacts/new_empty.json +2 -3
  37. data/spec/fixtures/contacts/new_notify.json +1 -1
  38. data/spec/fixtures/{listings/saved_search.json → saved_searches/get.json} +1 -1
  39. data/spec/fixtures/saved_searches/new.json +8 -0
  40. data/spec/fixtures/saved_searches/post.json +12 -0
  41. data/spec/fixtures/saved_searches/update.json +6 -0
  42. data/spec/fixtures/subscriptions/get.json +19 -0
  43. data/spec/fixtures/subscriptions/new.json +13 -0
  44. data/spec/fixtures/subscriptions/post.json +10 -0
  45. data/spec/fixtures/subscriptions/put.json +12 -0
  46. data/spec/fixtures/subscriptions/subscribe.json +5 -0
  47. data/spec/fixtures/subscriptions/update.json +6 -0
  48. data/spec/mock_helper.rb +14 -6
  49. data/spec/oauth2_helper.rb +2 -0
  50. data/spec/spec_helper.rb +4 -7
  51. data/spec/unit/spark_api/authentication/api_auth_spec.rb +0 -1
  52. data/spec/unit/spark_api/authentication/oauth2_impl/faraday_middleware_spec.rb +32 -0
  53. data/spec/unit/spark_api/authentication/oauth2_impl/single_session_provider_spec.rb +9 -0
  54. data/spec/unit/spark_api/authentication/oauth2_spec.rb +29 -3
  55. data/spec/unit/spark_api/authentication_spec.rb +4 -10
  56. data/spec/unit/spark_api/configuration/yaml_spec.rb +4 -3
  57. data/spec/unit/spark_api/configuration_spec.rb +22 -8
  58. data/spec/unit/spark_api/models/account_spec.rb +5 -0
  59. data/spec/unit/spark_api/models/base_spec.rb +27 -0
  60. data/spec/unit/spark_api/models/concerns/destroyable_spec.rb +28 -0
  61. data/spec/unit/spark_api/models/concerns/savable_spec.rb +61 -0
  62. data/spec/unit/spark_api/models/contact_spec.rb +5 -5
  63. data/spec/unit/spark_api/models/dirty_spec.rb +46 -0
  64. data/spec/unit/spark_api/models/finders_spec.rb +0 -7
  65. data/spec/unit/spark_api/models/saved_search_spec.rb +34 -3
  66. data/spec/unit/spark_api/models/shared_listing_spec.rb +1 -1
  67. data/spec/unit/spark_api/models/subscription_spec.rb +106 -0
  68. data/spec/unit/spark_api/multi_client_spec.rb +14 -4
  69. data/spec/unit/spark_api/paginate_spec.rb +0 -1
  70. data/spec/unit/spark_api/request_spec.rb +10 -0
  71. data/spec/unit/spark_api_spec.rb +0 -3
  72. metadata +127 -45
  73. data/lib/spark_api/authentication/oauth2_impl/password_provider.rb +0 -24
@@ -0,0 +1,7 @@
1
+ require 'spark_api/models/concerns/savable'
2
+ require 'spark_api/models/concerns/destroyable'
3
+
4
+ module SparkApi
5
+ module Models
6
+ end
7
+ end
@@ -0,0 +1,32 @@
1
+ module SparkApi
2
+ module Models
3
+ module Concerns
4
+
5
+ module Destroyable
6
+
7
+ def destroy(arguments = {})
8
+ self.errors = []
9
+ begin
10
+ return destroy!(arguments)
11
+ rescue BadResourceRequest => e
12
+ self.errors << {:code => e.code, :message => e.message}
13
+ SparkApi.logger.error("Failed to destroy resource #{self}: #{e.message}")
14
+ rescue NotFound => e
15
+ SparkApi.logger.error("Failed to destroy resource #{self}: #{e.message}")
16
+ end
17
+ false
18
+ end
19
+ def destroy!(arguments = {})
20
+ connection.delete("#{self.class.path}/#{self.Id}", arguments)
21
+ @destroyed = true
22
+ true
23
+ end
24
+ alias_method :delete, :destroy # backwards compatibility
25
+
26
+ def destroyed?; @destroyed ? @destroyed : false end
27
+
28
+ end
29
+
30
+ end
31
+ end
32
+ end
@@ -0,0 +1,66 @@
1
+ module SparkApi
2
+ module Models
3
+ module Concerns
4
+
5
+ module Savable
6
+
7
+ def save(arguments = {})
8
+ self.errors = [] # clear the errors hash
9
+ begin
10
+ return save!(arguments)
11
+ rescue BadResourceRequest => e
12
+ self.errors << {:code => e.code, :message => e.message}
13
+ SparkApi.logger.error("Failed to save resource #{self}: #{e.message}")
14
+ rescue NotFound => e
15
+ SparkApi.logger.error("Failed to save resource #{self}: #{e.message}")
16
+ end
17
+ false
18
+ end
19
+ def save!(arguments = {})
20
+ persisted? ? update!(arguments) : create!(arguments)
21
+ end
22
+
23
+ def create!(arguments = {})
24
+ results = connection.post self.class.path, {
25
+ resource_pluralized => [ attributes ]
26
+ }.merge(params_for_save), arguments
27
+
28
+ update_resource_identifiers(results.first)
29
+ reset_dirty
30
+ params_for_save.clear
31
+ true
32
+ end
33
+
34
+ def update!(arguments = {})
35
+ return true unless changed?
36
+ connection.put "#{self.class.path}/#{self.Id}", dirty_attributes, arguments
37
+ reset_dirty
38
+ params_for_save.clear
39
+ true
40
+ end
41
+
42
+ def params_for_save
43
+ @params_for_save ||= {}
44
+ end
45
+
46
+ private
47
+
48
+ def update_resource_identifiers(result)
49
+ attributes['ResourceUri'] = result['ResourceUri']
50
+ attributes['Id'] = result['Id'] ? result['Id'] : parse_id(result['ResourceUri'])
51
+ end
52
+
53
+ # can be overridden
54
+ def resource_pluralized
55
+ resource = self.class.name.split('::').last
56
+ unless resource.split('').last == "s"
57
+ resource = resource + "s"
58
+ end
59
+ resource
60
+ end
61
+
62
+ end
63
+
64
+ end
65
+ end
66
+ end
@@ -2,28 +2,11 @@ module SparkApi
2
2
  module Models
3
3
  class Contact < Base
4
4
  extend Finders
5
+ include Concerns::Savable,
6
+ Concerns::Destroyable
7
+
5
8
  self.element_name="contacts"
6
9
 
7
- def save(arguments={})
8
- self.errors = [] # clear the errors hash
9
- begin
10
- return save!(arguments)
11
- rescue BadResourceRequest => e
12
- self.errors << {:code => e.code, :message => e.message}
13
- SparkApi.logger.error("Failed to save resource #{self}: #{e.message}")
14
- rescue NotFound => e
15
- SparkApi.logger.error("Failed to save resource #{self}: #{e.message}")
16
- end
17
- false
18
- end
19
- def save!(arguments={})
20
- results = connection.post self.class.path, {"Contacts" => [ attributes ], "Notify" => notify? }, arguments
21
- result = results.first
22
- attributes['ResourceUri'] = result['ResourceUri']
23
- attributes['Id'] = parse_id(result['ResourceUri'])
24
- true
25
- end
26
-
27
10
  def self.by_tag(tag_name, arguments={})
28
11
  collect(connection.get("#{path}/tags/#{tag_name}", arguments))
29
12
  end
@@ -37,11 +20,9 @@ module SparkApi
37
20
  end
38
21
 
39
22
  # Notify the agent of contact creation via a Spark notification.
40
- def notify?
41
- @notify == true
42
- end
43
- def notify=(notify_me=true)
44
- @notify = notify_me
23
+ def notify?; params_for_save[:Notify] == true end
24
+ def notify=(notify_me)
25
+ params_for_save[:Notify] = notify_me
45
26
  end
46
27
 
47
28
  end
@@ -0,0 +1,57 @@
1
+ module SparkApi
2
+ module Models
3
+ module Dirty
4
+
5
+ def changed?
6
+ changed.any?
7
+ end
8
+
9
+ def changed
10
+ changed_attributes.keys
11
+ end
12
+
13
+ def changes
14
+ Hash[changed.map { |attr| [attr, attribute_change(attr)] }]
15
+ end
16
+
17
+ def previous_changes
18
+ @previously_changed
19
+ end
20
+
21
+ # hash with changed attributes and their original values
22
+ def changed_attributes
23
+ @changed_attributes ||= {}
24
+ end
25
+
26
+ # hash with changed attributes and their new values
27
+ def dirty_attributes
28
+ changed.inject({}) { |h, k| h[k] = attributes[k]; h }
29
+ end
30
+
31
+ private
32
+
33
+ def reset_dirty
34
+ @previously_changed = changed_attributes
35
+ @changed_attributes.clear
36
+ end
37
+
38
+ def attribute_changed?(attr)
39
+ changed.include?(attr)
40
+ end
41
+
42
+ def attribute_change(attr)
43
+ [changed_attributes[attr], __send__(attr)] if attribute_changed?(attr)
44
+ end
45
+
46
+ def attribute_will_change!(attr)
47
+ begin
48
+ value = __send__(attr)
49
+ value = value.duplicable? ? value.clone : value
50
+ rescue TypeError, NoMethodError; end
51
+
52
+ changed_attributes[attr] = value unless changed.include?(attr)
53
+ end
54
+
55
+ end
56
+ end
57
+ end
@@ -15,10 +15,6 @@ module SparkApi
15
15
  else find_single(scope, options)
16
16
  end
17
17
  end
18
-
19
- def all(*arguments)
20
- find(:all, *arguments)
21
- end
22
18
 
23
19
  def first(*arguments)
24
20
  find(:first, *arguments)
@@ -1,7 +1,11 @@
1
1
  module SparkApi
2
2
  module Models
3
+
3
4
  class SavedSearch < Base
4
5
  extend Finders
6
+ include Concerns::Savable,
7
+ Concerns::Destroyable
8
+
5
9
  self.element_name="savedsearches"
6
10
 
7
11
  def self.provided()
@@ -11,6 +15,12 @@ module SparkApi
11
15
  SparkApi.logger.info("#{self.name}.path: #{provided.path}")
12
16
  end
13
17
  end
18
+
19
+ private
20
+
21
+ def resource_pluralized; "SavedSearches" end
22
+
14
23
  end
24
+
15
25
  end
16
26
  end
@@ -25,6 +25,7 @@ module SparkApi
25
25
 
26
26
  begin
27
27
  datetime = DateTime.strptime(formatted_date, '%m/%d/%YT%l:%M %P')
28
+ dst_offset = 0
28
29
  rescue => ex
29
30
  ; # Do nothing; doesn't matter
30
31
  end
@@ -35,6 +36,7 @@ module SparkApi
35
36
  begin
36
37
  datetime = DateTime.strptime(formatted_date, format)
37
38
  datetime = datetime.new_offset DateTime.now.offset
39
+ dst_offset = Time.now.dst? ? 0 : 1
38
40
  break
39
41
  rescue => ex
40
42
  next
@@ -46,9 +48,11 @@ module SparkApi
46
48
  unless datetime
47
49
  raise ArgumentError.new('invalid date')
48
50
  end
51
+
52
+
49
53
 
50
54
  attributes[time] = Time.local(datetime.year, datetime.month, datetime.day,
51
- datetime.hour, datetime.min, datetime.sec)
55
+ datetime.hour + dst_offset, datetime.min, datetime.sec)
52
56
  end
53
57
  attributes['Date'] = date
54
58
  end
@@ -0,0 +1,52 @@
1
+ module SparkApi
2
+ module Models
3
+
4
+ class Subscription < Base
5
+ extend Finders
6
+ include Concerns::Savable,
7
+ Concerns::Destroyable
8
+
9
+ self.element_name = "subscriptions"
10
+
11
+ # list subscribers (private role)
12
+ def subscribers
13
+ return {} unless persisted?
14
+ results = connection.get("#{self.class.path}/#{@attributes["Id"]}/subscribers")
15
+ @attributes['RecipientIds'] = results.first['RecipientIds']
16
+ results
17
+ end
18
+
19
+ # subscribe/unsubscribe contact (private role)
20
+ [:subscribe, :unsubscribe].each do |action|
21
+ method = (action == :subscribe ? :put : :delete)
22
+ define_method(action) do |contact|
23
+ return false unless persisted?
24
+ self.errors = []
25
+ contact_id = contact.is_a?(Contact) ? contact.Id : contact
26
+ begin
27
+ connection.send(method, "#{self.class.path}/#{@attributes["Id"]}/subscribers/#{contact_id}")
28
+ rescue BadResourceRequest, NotFound => e
29
+ self.errors << { :code => e.code, :message => e.message }
30
+ SparkApi.logger.error("Failed to #{action} contact #{contact}: #{e.message}")
31
+ return false
32
+ end
33
+ update_recipients(action, contact_id)
34
+ true
35
+ end
36
+ end
37
+
38
+ private
39
+
40
+ def update_recipients(method, contact_id)
41
+ @attributes['RecipientIds'] = [] if @attributes['RecipientIds'].nil?
42
+ if method == :subscribe
43
+ @attributes['RecipientIds'] << contact_id
44
+ else
45
+ @attributes['RecipientIds'].delete contact_id
46
+ end
47
+ end
48
+
49
+ end
50
+
51
+ end
52
+ end
@@ -24,7 +24,7 @@ module SparkApi
24
24
  # Hash of the json results as documented in the api.
25
25
  # :raises:
26
26
  # SparkApi::ClientError or subclass if the request failed.
27
- def post(path, body={}, options={})
27
+ def post(path, body = nil, options={})
28
28
  request(:post, path, body, options)
29
29
  end
30
30
 
@@ -37,7 +37,7 @@ module SparkApi
37
37
  # Hash of the json results as documented in the api.
38
38
  # :raises:
39
39
  # SparkApi::ClientError or subclass if the request failed.
40
- def put(path, body={}, options={})
40
+ def put(path, body = nil, options={})
41
41
  request(:put, path, body, options)
42
42
  end
43
43
 
@@ -64,13 +64,13 @@ module SparkApi
64
64
  begin
65
65
  request_opts = {}
66
66
  request_opts.merge!(options)
67
- post_data = body.nil? ? nil : {"D" => body }.to_json
68
67
  request_path = "/#{version}#{path}"
69
68
  start_time = Time.now
70
69
  SparkApi.logger.debug("#{method.to_s.upcase} Request: #{request_path}")
71
- if post_data.nil?
70
+ if [:get, :delete, :head].include?(method.to_sym)
72
71
  response = authenticator.request(method, request_path, nil, request_opts)
73
72
  else
73
+ post_data = process_request_body(body)
74
74
  SparkApi.logger.debug("#{method.to_s.upcase} Data: #{post_data}")
75
75
  response = authenticator.request(method, request_path, post_data, request_opts)
76
76
  end
@@ -89,6 +89,19 @@ module SparkApi
89
89
  raise
90
90
  end
91
91
  response.body
92
+ rescue Faraday::Error::ConnectionFailed => e
93
+ if self.ssl_verify && e.message =~ /certificate verify failed/
94
+ SparkApi.logger.error(SparkApi::Errors.ssl_verification_error)
95
+ end
96
+ raise e
97
+ end
98
+
99
+ def process_request_body(body)
100
+ if body.is_a?(Hash)
101
+ body.empty? ? "{}" : {"D" => body }.to_json
102
+ else
103
+ body
104
+ end
92
105
  end
93
106
 
94
107
  end
@@ -8,43 +8,6 @@ module SparkApi
8
8
  end
9
9
  end
10
10
 
11
- # All known response codes listed in the API
12
- module ResponseCodes
13
- NOT_FOUND = 404
14
- METHOD_NOT_ALLOWED = 405
15
- INVALID_KEY = 1000
16
- DISABLED_KEY = 1010
17
- API_USER_REQUIRED = 1015
18
- SESSION_TOKEN_EXPIRED = 1020
19
- SSL_REQUIRED = 1030
20
- INVALID_JSON = 1035
21
- INVALID_FIELD = 1040
22
- MISSING_PARAMETER = 1050
23
- INVALID_PARAMETER = 1053
24
- CONFLICTING_DATA = 1055
25
- NOT_AVAILABLE= 1500
26
- RATE_LIMIT_EXCEEDED = 1550
27
- end
28
-
29
- # Errors built from API responses
30
- class InvalidResponse < StandardError; end
31
- class ClientError < StandardError
32
- attr_reader :code, :status, :details
33
- def initialize (options = {})
34
- # Support the standard initializer for errors
35
- opts = options.is_a?(Hash) ? options : {:message => options.to_s}
36
- @code = opts[:code]
37
- @status = opts[:status]
38
- @details = opts[:details]
39
- super(opts[:message])
40
- end
41
-
42
- end
43
- class NotFound < ClientError; end
44
- class PermissionDenied < ClientError; end
45
- class NotAllowed < ClientError; end
46
- class BadResourceRequest < ClientError; end
47
-
48
11
  # Nice and handy class wrapper for the api response hash
49
12
  class ApiResponse < ::Array
50
13
  include SparkApi::Response
@@ -14,8 +14,8 @@ SparkApi.configure do |config|
14
14
  config.api_key = "YOUR_CLIENT_ID"
15
15
  config.api_secret = "YOUR_CLIENT_SECRET"
16
16
  config.callback = "YOUR_REDIRECT_URI"
17
- config.auth_endpoint = "https://developers.sparkplatform.com/openid"
18
- config.endpoint = 'https://developers.sparkapi.com'
17
+ config.auth_endpoint = "https://sparkplatform.com/openid"
18
+ config.endpoint = 'https://sparkapi.com'
19
19
  end
20
20
 
21
21
  client = SparkApi.client
@@ -24,7 +24,7 @@ client = SparkApi.client
24
24
  # Step 1:
25
25
  # To get your code to post to /v1/oauth2/grant, send the end user to this URI, replacing the all-capped strings with
26
26
  # the CGI-escaped credentials for your key:
27
- # https://developers.sparkplatform.com/oauth2?response_type=code&client_id=YOUR_CLIENT_ID&redirect_uri=YOUR_REDIRECT_URI
27
+ # https://sparkplatform.com/oauth2?response_type=code&client_id=YOUR_CLIENT_ID&redirect_uri=YOUR_REDIRECT_URI
28
28
  # When the user has finished, they will land at:
29
29
  # YOUR_REDIRECT_URI?code=CODE.
30
30
  puts "Go here and log in to get your code: #{client.authenticator.authorization_url}"