spark_api 1.4.24 → 1.4.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,15 +1,15 @@
1
1
  ---
2
2
  !binary "U0hBMQ==":
3
3
  metadata.gz: !binary |-
4
- NjU2ZTQ1NGNmOTEyYmMzNmY2MWMzMjg2ZmI4YTc3NGNmNTg3NzkzYw==
4
+ YWRmNWJhNzE4N2I3MTU1Y2M5MjFlYWFmMGVkMjlhNjg1MTY2NTgyOA==
5
5
  data.tar.gz: !binary |-
6
- ZGY1YjFkNjBjYTBiOGExZWUwZDIzNzk5ZTMzMGFiMmIzMDg2YzYyNg==
6
+ MWViOWU1YjgzODg2NjgzZWQyOWI2MTcxZThkY2YxNWVhYmY1NTc5MA==
7
7
  SHA512:
8
8
  metadata.gz: !binary |-
9
- ZDAxMzg0ZmI0ZjYyMzYxOWMzYTcwYTBiMmNlNzJjNWM2ZTkyMWNjNzYxYWU0
10
- YzEyNjA4NTM2NGJjY2FiNzIxNzFmZWIyZmNiZTM4MWVkMjg0NjIxNjAyYWRm
11
- MDgxN2VhYjU5OWM0NjJjNWU5MGQ1MGViZTUwMzY0OGQ2ODYzNzQ=
9
+ Y2VlNjczMDNmMjQ2NGUwYmY0ZGNmMjFhOTRiZDg4OTJmYzAzYTgyZmE3ZTcz
10
+ Mzc4NzZjYTI2MzBmMjAwOGI2MTIzZmI1MTU4MzVjOGRhMTYyY2NhYTc2YjEy
11
+ NWUzZTNlYzRhMmFlZDhiM2M3NmFhNGVmZTM5ODdiMzcxMGE3NWY=
12
12
  data.tar.gz: !binary |-
13
- MWExMjNiZGRiOTJhMTU1NTg5ZDI0MjNkNmU5NmMyYjc2ZjcwMWU0MGU3MGIz
14
- NTA1MzIzMTY3YjcxZDBlMGQ2YzdlMTg3OTVjMzMzMTQwMTMxZjIwZDViMDAz
15
- MDVjMDZhOGI0MjNkNTA5ZjdjYjUxNzllM2M1ZTBlNzc4NDFlNGU=
13
+ MWQxOWQ0ODM3MTcyMjJkZTBlNTI1MmYyZWVjYzdhNzE2MzQzODNiOGEyZjVk
14
+ ZGZlNzUwNzg4Y2EyN2EyNTAyM2VmOTNiZjVmZjg0MjRjNDk1Y2YwYTAxZWVm
15
+ MzgwOTljMDVhZWI0MTE2YmNiMzIzYzBkYzJlNzFlZDRiYmYwY2Q=
data/VERSION CHANGED
@@ -1 +1 @@
1
- 1.4.24
1
+ 1.4.25
@@ -11,7 +11,7 @@ module SparkApi
11
11
  # valid configuration options
12
12
  VALID_OPTION_KEYS = [:api_key, :api_secret, :api_user, :endpoint,
13
13
  :user_agent, :version, :ssl, :ssl_verify, :oauth2_provider, :authentication_mode,
14
- :auth_endpoint, :callback, :compress, :timeout, :middleware].freeze
14
+ :auth_endpoint, :callback, :compress, :timeout, :middleware, :request_id_chain].freeze
15
15
  OAUTH2_KEYS = [:authorization_uri, :access_uri, :client_id, :client_secret,
16
16
  # Requirements for authorization_code grant type
17
17
  :redirect_uri,
@@ -44,6 +44,7 @@ module SparkApi
44
44
  DEFAULT_COMPRESS = false
45
45
  DEFAULT_TIMEOUT = 5 # seconds
46
46
  DEFAULT_MIDDLEWARE = 'spark_api'
47
+ DEFAULT_REQUEST_ID_CHAIN = nil
47
48
 
48
49
  X_SPARK_API_USER_AGENT = "X-SparkApi-User-Agent"
49
50
 
@@ -78,6 +79,7 @@ module SparkApi
78
79
  self.compress = DEFAULT_COMPRESS
79
80
  self.timeout = DEFAULT_TIMEOUT
80
81
  self.middleware = DEFAULT_MIDDLEWARE
82
+ self.request_id_chain = DEFAULT_REQUEST_ID_CHAIN
81
83
  self
82
84
  end
83
85
  end
@@ -5,6 +5,15 @@ module SparkApi
5
5
  # =Connection
6
6
  # Mixin module for handling http connection information
7
7
  module Connection
8
+ REG_HTTP = /^http:/
9
+ REG_HTTPS = /^https:/
10
+ HTTP_SCHEME = 'http:'
11
+ HTTPS_SCHEME = 'https:'
12
+ ACCEPT_ENCODING = 'Accept-Encoding'
13
+ COMPRESS_ACCEPT_ENCODING = 'gzip, deflate'
14
+ X_REQUEST_ID_CHAIN = 'X-Request-Id-Chain'
15
+ MIME_JSON = 'application/json'
16
+ MIME_RESO = 'application/json, application/xml'
8
17
  # Main connection object for running requests. Bootstraps the Faraday abstraction layer with
9
18
  # our client configuration.
10
19
  def connection(force_ssl = false)
@@ -13,13 +22,17 @@ module SparkApi
13
22
  }
14
23
  if(force_ssl || self.ssl)
15
24
  opts[:ssl] = {:verify => false } unless self.ssl_verify
16
- opts[:url] = @endpoint.sub /^http:/, "https:"
25
+ opts[:url] = @endpoint.sub REG_HTTP, HTTPS_SCHEME
17
26
  else
18
- opts[:url] = @endpoint.sub /^https:/, "http:"
27
+ opts[:url] = @endpoint.sub REG_HTTPS, HTTP_SCHEME
19
28
  end
20
29
 
21
30
  if self.compress
22
- opts[:headers]["Accept-Encoding"] = 'gzip, deflate'
31
+ opts[:headers][ACCEPT_ENCODING] = COMPRESS_ACCEPT_ENCODING
32
+ end
33
+
34
+ if request_id_chain
35
+ opts[:headers][X_REQUEST_ID_CHAIN] = request_id_chain
23
36
  end
24
37
 
25
38
  conn = Faraday.new(opts) do |conn|
@@ -42,8 +55,8 @@ module SparkApi
42
55
 
43
56
  def spark_headers
44
57
  {
45
- :accept => 'application/json',
46
- :content_type => 'application/json',
58
+ :accept => MIME_JSON,
59
+ :content_type => MIME_JSON,
47
60
  :user_agent => Configuration::DEFAULT_USER_AGENT,
48
61
  Configuration::X_SPARK_API_USER_AGENT => user_agent
49
62
  }
@@ -51,7 +64,7 @@ module SparkApi
51
64
 
52
65
  def reso_headers
53
66
  {
54
- :accept => 'application/json, application/xml',
67
+ :accept => MIME_RESO,
55
68
  :user_agent => Configuration::DEFAULT_USER_AGENT,
56
69
  Configuration::X_SPARK_API_USER_AGENT => user_agent
57
70
  }
@@ -21,7 +21,7 @@ module SparkApi
21
21
  # Errors built from API responses
22
22
  class InvalidResponse < StandardError; end
23
23
  class ClientError < StandardError
24
- attr_reader :code, :status, :details, :request_path, :errors
24
+ attr_reader :code, :status, :details, :request_path, :request_id, :errors
25
25
  def initialize (options = {})
26
26
  # Support the standard initializer for errors
27
27
  opts = options.is_a?(Hash) ? options : {:message => options.to_s}
@@ -29,6 +29,7 @@ module SparkApi
29
29
  @status = opts[:status]
30
30
  @details = opts[:details]
31
31
  @request_path = opts[:request_path]
32
+ @request_id = opts[:request_id]
32
33
  @errors = opts[:errors]
33
34
  super(opts[:message])
34
35
  end
@@ -22,7 +22,8 @@ module SparkApi
22
22
  unless body.is_a?(Hash) && body.key?("D")
23
23
  raise InvalidResponse, "The server response could not be understood"
24
24
  end
25
- response = ApiResponse.new body
25
+ request_id = env[:response_headers]['x-request-id']
26
+ response = ApiResponse.new body, request_id
26
27
  paging = response.pagination
27
28
 
28
29
  if paging.nil?
@@ -38,6 +39,7 @@ module SparkApi
38
39
 
39
40
  error_hash = {
40
41
  :request_path => env[:url],
42
+ :request_id => request_id,
41
43
  :message => response.message,
42
44
  :code => response.code,
43
45
  :status => env[:status],
@@ -79,7 +79,7 @@ module SparkApi
79
79
  response = authenticator.request(method, request_path, post_data, request_opts)
80
80
  end
81
81
  request_time = Time.now - start_time
82
- SparkApi.logger.debug { "[#{(request_time * 1000).to_i}ms] Api: #{method.to_s.upcase} #{request_path}" }
82
+ SparkApi.logger.debug { "[#{(request_time * 1000).to_i}ms] Api: #{method.to_s.upcase} #{request_path} request_id=#{response.headers['x-request-id']}" }
83
83
  rescue PermissionDenied => e
84
84
  if(ResponseCodes::SESSION_TOKEN_EXPIRED == e.code)
85
85
  unless (attempts +=1) > 1
@@ -1,7 +1,7 @@
1
1
  module SparkApi
2
2
  # API Response interface
3
3
  module Response
4
- ATTRIBUTES = [:code, :message, :results, :success, :pagination, :details, :d, :errors, :sparkql_errors]
4
+ ATTRIBUTES = [:code, :message, :results, :success, :pagination, :details, :d, :errors, :sparkql_errors, :request_id]
5
5
  attr_accessor *ATTRIBUTES
6
6
  def success?
7
7
  @success
@@ -10,21 +10,31 @@ module SparkApi
10
10
 
11
11
  # Nice and handy class wrapper for the api response hash
12
12
  class ApiResponse < ::Array
13
+ MAGIC_D = 'D'
14
+ MESSAGE = 'Message'
15
+ CODE = 'Code'
16
+ RESULTS = 'Results'
17
+ SUCCESS = 'Success'
18
+ PAGINATION = 'Pagination'
19
+ DETAILS = 'Details'
20
+ ERRORS = 'Errors'
21
+ SPARKQL_ERRORS = 'SparkQLErrors'
13
22
  include SparkApi::Response
14
- def initialize(d)
23
+ def initialize d, request_id=nil
15
24
  begin
16
- self.d = d["D"]
25
+ self.d = d[MAGIC_D]
17
26
  if self.d.nil? || self.d.empty?
18
27
  raise InvalidResponse, "The server response could not be understood"
19
28
  end
20
- self.message = self.d["Message"]
21
- self.code = self.d["Code"]
22
- self.results = Array(self.d["Results"])
23
- self.success = self.d["Success"]
24
- self.pagination = self.d["Pagination"]
25
- self.details = self.d["Details"] || []
26
- self.errors = self.d["Errors"]
27
- self.sparkql_errors = self.d['SparkQLErrors']
29
+ self.message = self.d[MESSAGE]
30
+ self.code = self.d[CODE]
31
+ self.results = Array(self.d[RESULTS])
32
+ self.success = self.d[SUCCESS]
33
+ self.pagination = self.d[PAGINATION]
34
+ self.details = self.d[DETAILS] || []
35
+ self.errors = self.d[ERRORS]
36
+ self.sparkql_errors = self.d[SPARKQL_ERRORS]
37
+ self.request_id = request_id
28
38
  super(results)
29
39
  rescue Exception => e
30
40
  SparkApi.logger.error "Unable to understand the response! #{d}"
@@ -13,6 +13,7 @@ describe SparkApi::Client, "Client config" do
13
13
  SparkApi.api_key = "my_api_key"
14
14
  SparkApi.api_key.should match("my_api_key")
15
15
  SparkApi.timeout.should eq(5)
16
+ SparkApi.request_id_chain.should be_nil
16
17
  end
17
18
  end
18
19
 
@@ -23,7 +24,8 @@ describe SparkApi::Client, "Client config" do
23
24
  :api_user => "1234",
24
25
  :auth_endpoint => "https://login.wade.dev.fbsdata.com",
25
26
  :endpoint => "http://api.wade.dev.fbsdata.com",
26
- :timeout => 15)
27
+ :timeout => 15,
28
+ :request_id_chain => 'foobar')
27
29
 
28
30
  client.api_key.should match("key_of_wade")
29
31
  client.api_secret.should match("TopSecret")
@@ -32,6 +34,7 @@ describe SparkApi::Client, "Client config" do
32
34
  client.endpoint.should match("http://api.wade.dev.fbsdata.com")
33
35
  client.version.should match("v1")
34
36
  client.timeout.should eq(15)
37
+ client.request_id_chain.should eq('foobar')
35
38
  end
36
39
 
37
40
  it "should allow unverified ssl certificates when verification is off" do
@@ -126,11 +129,14 @@ describe SparkApi::Client, "Client config" do
126
129
  config.version = "veleventy"
127
130
  config.endpoint = "test.api.sparkapi.com"
128
131
  config.user_agent = "my useragent"
132
+ config.request_id_chain = 'foobar'
129
133
  end
130
134
 
131
135
  SparkApi.api_key.should match("my_key")
136
+ SparkApi.request_id_chain.should eq("foobar")
132
137
  SparkApi.reset
133
138
  SparkApi.api_key.should == SparkApi::Configuration::DEFAULT_API_KEY
139
+ SparkApi.request_id_chain.should eq(SparkApi::Configuration::DEFAULT_REQUEST_ID_CHAIN)
134
140
 
135
141
  end
136
142
  end
@@ -184,6 +190,31 @@ describe SparkApi::Client, "Client config" do
184
190
  })
185
191
  end
186
192
 
193
+ it "should pass along the request_id_chain header if set" do
194
+ reset_config
195
+ stub_auth_request
196
+ stub_request(:get, "#{SparkApi.endpoint}/#{SparkApi.version}/headers").
197
+ with(:query => {
198
+ :ApiUser => "foobar",
199
+ :ApiSig => "717a066c4f4302c5ca9507e484db4812",
200
+ :AuthToken => "c401736bf3d3f754f07c04e460e09573"
201
+ }).
202
+ to_return(:body => '{"D":{"Success": true,"Results": []}}')
203
+ SparkApi.configure do |config|
204
+ config.user_agent = "my useragent"
205
+ config.request_id_chain = 'foobar'
206
+ end
207
+ SparkApi.client.get '/headers'
208
+ WebMock.should have_requested(:get, "#{SparkApi.endpoint}/#{SparkApi.version}/headers?ApiUser=foobar&ApiSig=717a066c4f4302c5ca9507e484db4812&AuthToken=c401736bf3d3f754f07c04e460e09573").
209
+ with(:headers => {
210
+ 'User-Agent' => SparkApi::Configuration::DEFAULT_USER_AGENT,
211
+ SparkApi::Configuration::X_SPARK_API_USER_AGENT => "my useragent",
212
+ 'Accept'=>'application/json',
213
+ 'Content-Type'=>'application/json',
214
+ 'X-Request-Id-Chain' => 'foobar'
215
+ })
216
+ end
217
+
187
218
  it "should not set gzip header by default" do
188
219
  c = SparkApi::Client.new(:endpoint => "https://sparkapi.com")
189
220
  c.connection.headers["Accept-Encoding"].should be_nil
@@ -2,7 +2,7 @@ require './spec/spec_helper'
2
2
 
3
3
  describe SparkApi do
4
4
  describe SparkApi::ClientError do
5
- subject { SparkApi::ClientError.new({:message=>"OMG FAIL", :code=>1234, :status=>500, :request_path => '/v1/foo'}) }
5
+ subject { SparkApi::ClientError.new({:message=>"OMG FAIL", :code=>1234, :status=>500, :request_path => '/v1/foo', :request_id => 'deadbeef'}) }
6
6
  it "should print a helpful to_s" do
7
7
  subject.to_s.should == "OMG FAIL"
8
8
  subject.message.should == "OMG FAIL"
@@ -18,6 +18,10 @@ describe SparkApi do
18
18
  subject.request_path.should == '/v1/foo'
19
19
  end
20
20
 
21
+ it "should have a request_id" do
22
+ subject.request_id.should == 'deadbeef'
23
+ end
24
+
21
25
  it "should raise and exception with attached message" do
22
26
  expect { raise subject.class, {:message=>"My Message", :code=>1000, :status=>404}}.to raise_error(SparkApi::ClientError) do |e|
23
27
  e.message.should == "My Message"
@@ -38,6 +42,7 @@ describe SparkApi do
38
42
  e.message.should == "My Message"
39
43
  e.code.should be == nil
40
44
  e.status.should be == nil
45
+ e.request_id.should be == nil
41
46
  end
42
47
  end
43
48
  end
@@ -51,6 +56,13 @@ describe SparkApi do
51
56
  r = SparkApi::ApiResponse.new({"D"=>{"Success" => true, "Results" => []}})
52
57
  r.success?.should be(true)
53
58
  r.results.empty?.should be(true)
59
+ r.request_id.should eq nil
60
+ end
61
+
62
+ it "should return the request_id" do
63
+ r = SparkApi::ApiResponse.new({"D"=>{"Success" => true, "Results" => []}}, 'foobar')
64
+ r.success?.should be(true)
65
+ r.request_id.should eq('foobar')
54
66
  end
55
67
  it "should have a message on error" do
56
68
  r = SparkApi::ApiResponse.new({"D"=>{"Success" => false, "Message" => "I am a failure."}})
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: spark_api
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.4.24
4
+ version: 1.4.25
5
5
  platform: ruby
6
6
  authors:
7
7
  - Brandon Hornseth
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2018-06-25 00:00:00.000000000 Z
12
+ date: 2018-06-26 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: faraday