scrapinghub-client 0.0.1

Files changed (61)
  1. checksums.yaml +7 -0
  2. data/.gitignore +1 -0
  3. data/.rspec +3 -0
  4. data/.travis.yml +6 -0
  5. data/Gemfile +5 -0
  6. data/LICENSE +21 -0
  7. data/README.md +46 -0
  8. data/Rakefile +22 -0
  9. data/lib/scrapinghub-client.rb +3 -0
  10. data/lib/scrapinghub/jobs.rb +175 -0
  11. data/lib/scrapinghub/version.rb +3 -0
  12. data/scrapinghub.gemspec +26 -0
  13. data/spec/fixtures/vcr_cassettes/jobs/delete/bad_auth.yml +38 -0
  14. data/spec/fixtures/vcr_cassettes/jobs/delete/job/invalid.yml +38 -0
  15. data/spec/fixtures/vcr_cassettes/jobs/delete/job/multiple.yml +38 -0
  16. data/spec/fixtures/vcr_cassettes/jobs/delete/job/single.yml +38 -0
  17. data/spec/fixtures/vcr_cassettes/jobs/delete/project/invalid.yml +39 -0
  18. data/spec/fixtures/vcr_cassettes/jobs/delete/project/valid.yml +38 -0
  19. data/spec/fixtures/vcr_cassettes/jobs/list/bad_auth.yml +38 -0
  20. data/spec/fixtures/vcr_cassettes/jobs/list/count/3.yml +53 -0
  21. data/spec/fixtures/vcr_cassettes/jobs/list/has_tag/invalid.yml +38 -0
  22. data/spec/fixtures/vcr_cassettes/jobs/list/has_tag/multiple.yml +53 -0
  23. data/spec/fixtures/vcr_cassettes/jobs/list/has_tag/single.yml +48 -0
  24. data/spec/fixtures/vcr_cassettes/jobs/list/job/invalid.yml +38 -0
  25. data/spec/fixtures/vcr_cassettes/jobs/list/job/multiple.yml +48 -0
  26. data/spec/fixtures/vcr_cassettes/jobs/list/job/single.yml +43 -0
  27. data/spec/fixtures/vcr_cassettes/jobs/list/lacks_tag/invalid.yml +81 -0
  28. data/spec/fixtures/vcr_cassettes/jobs/list/lacks_tag/multiple.yml +67 -0
  29. data/spec/fixtures/vcr_cassettes/jobs/list/lacks_tag/single.yml +72 -0
  30. data/spec/fixtures/vcr_cassettes/jobs/list/project/invalid.yml +39 -0
  31. data/spec/fixtures/vcr_cassettes/jobs/list/project/valid.yml +81 -0
  32. data/spec/fixtures/vcr_cassettes/jobs/list/spider/invalid.yml +38 -0
  33. data/spec/fixtures/vcr_cassettes/jobs/list/spider/valid.yml +62 -0
  34. data/spec/fixtures/vcr_cassettes/jobs/list/state/finished.yml +81 -0
  35. data/spec/fixtures/vcr_cassettes/jobs/list/state/pending.yml +38 -0
  36. data/spec/fixtures/vcr_cassettes/jobs/schedule/bad_auth.yml +38 -0
  37. data/spec/fixtures/vcr_cassettes/jobs/schedule/project/invalid.yml +39 -0
  38. data/spec/fixtures/vcr_cassettes/jobs/schedule/spider/add_tag.yml +38 -0
  39. data/spec/fixtures/vcr_cassettes/jobs/schedule/spider/already-running.yml +39 -0
  40. data/spec/fixtures/vcr_cassettes/jobs/schedule/spider/extra.yml +38 -0
  41. data/spec/fixtures/vcr_cassettes/jobs/schedule/spider/minimal.yml +38 -0
  42. data/spec/fixtures/vcr_cassettes/jobs/schedule/spider/priority.yml +38 -0
  43. data/spec/fixtures/vcr_cassettes/jobs/stop/bad_auth.yml +38 -0
  44. data/spec/fixtures/vcr_cassettes/jobs/stop/job/already-stopped.yml +38 -0
  45. data/spec/fixtures/vcr_cassettes/jobs/stop/job/invalid.yml +38 -0
  46. data/spec/fixtures/vcr_cassettes/jobs/stop/job/valid.yml +38 -0
  47. data/spec/fixtures/vcr_cassettes/jobs/stop/project/invalid.yml +39 -0
  48. data/spec/fixtures/vcr_cassettes/jobs/update/bad_auth.yml +38 -0
  49. data/spec/fixtures/vcr_cassettes/jobs/update/has_tag.yml +38 -0
  50. data/spec/fixtures/vcr_cassettes/jobs/update/job.yml +38 -0
  51. data/spec/fixtures/vcr_cassettes/jobs/update/lacks_tag.yml +38 -0
  52. data/spec/fixtures/vcr_cassettes/jobs/update/no-query-filters.yml +38 -0
  53. data/spec/fixtures/vcr_cassettes/jobs/update/no-update-params.yml +38 -0
  54. data/spec/fixtures/vcr_cassettes/jobs/update/project/invalid.yml +39 -0
  55. data/spec/fixtures/vcr_cassettes/jobs/update/spider.yml +38 -0
  56. data/spec/fixtures/vcr_cassettes/jobs/update/state.yml +38 -0
  57. data/spec/integration/jobs_spec.rb +567 -0
  58. data/spec/spec_helper.rb +22 -0
  59. data/spec/unit/jobs_spec.rb +188 -0
  60. data/spec/unit/scrapinghub_spec.rb +8 -0
  61. metadata +200 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 5388fb17c08f77b3091bdb87b8be779237f21404
+   data.tar.gz: 3756c4dcaefbf715608a9d958beb986f18b6f8da
+ SHA512:
+   metadata.gz: 457ceddece99e63f370213e5b17a667d414d315f750cc7ff675dc98e7ce56f179c03fa84b9d73c6951511a6adb2e68c80ef28bfccfe3fa9cc7ea19ec305a05e6
+   data.tar.gz: ed0dcb6729211f0dc44e57ebe047e71ccc8ea87b24b21e1b7d30ca6be223c3f361f61e905ff005c15ce3d9ef9d38f96e60513e97c694db9bad92f3b62407f38d
data/.gitignore ADDED
@@ -0,0 +1 @@
+ Gemfile.lock
data/.rspec ADDED
@@ -0,0 +1,3 @@
+ --tty
+ --color
+ --profile
data/.travis.yml ADDED
@@ -0,0 +1,6 @@
+ language: ruby
+ rvm:
+   - "2.2"
+   - "2.2.0"
+   - "2.1.0"
+   - ruby-head
data/Gemfile ADDED
@@ -0,0 +1,5 @@
+ source "https://rubygems.org"
+
+ gemspec
+
+ gem "codeclimate-test-reporter", require: nil
data/LICENSE ADDED
@@ -0,0 +1,21 @@
+ The MIT License (MIT)
+
+ Copyright (c) 2015 Abe Voelker
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,46 @@
+ Scrapinghub client
+ ==================
+
+ [![Build Status](https://travis-ci.org/abevoelker/scrapinghub-client.svg?branch=master)][travis]
+ [![Dependency Status](https://gemnasium.com/abevoelker/scrapinghub-client.svg)][gemnasium]
+ [![Code Climate](https://codeclimate.com/github/abevoelker/scrapinghub-client/badges/gpa.svg)][codeclimate]
+ [![Test Coverage](https://codeclimate.com/github/abevoelker/scrapinghub-client/badges/coverage.svg)][codeclimate]
+
+ Ruby client for the [Scrapinghub API][api]. So far it only supports the [Jobs API][jobs-api] (pull requests welcome).
+
+ This library takes an FP-ish approach. It uses the [contracts gem][contracts] to validate function input and output types (see the [docs][] for the full list of functions and their types) and the [kleisli gem][kleisli] to return composition-friendly output types. Outputs will be a `Try::Failure` if an exception was raised (e.g. a network timeout), a `Left` if the Scrapinghub API returns a failure, or a `Right` if the operation was successful.
+
+ The kleisli gem's [introductory blog post][kleisli-blog] gives some great examples of how to work with the output types.
+
+ Install
+ --------
+
+ Add to Gemfile:
+
+ ```
+ gem "scrapinghub-client"
+ ```
+
+ **Note**: although the gem is named `scrapinghub-client`, the gem's namespace is `Scrapinghub`.
+
+ Example
+ --------
+
+ ```ruby
+ require "scrapinghub-client"
+
+ j = Scrapinghub::Jobs.new(api_key: 'abc123')
+ j.schedule(project: 123, spider: "foo", add_tag: "bar", extra: { DOWNLOAD_DELAY: "0.5" })
+   .fmap{|r| puts "Job scheduled! Jobid: #{r['jobid']}"}
+   .or{|f| puts "Failed to schedule job! Reason: #{f.inspect}"}
+ ```
+
+ [travis]: https://travis-ci.org/abevoelker/scrapinghub-client
+ [gemnasium]: https://gemnasium.com/abevoelker/scrapinghub-client
+ [codeclimate]: https://codeclimate.com/github/abevoelker/scrapinghub-client
+ [api]: http://doc.scrapinghub.com/api.html
+ [jobs-api]: http://doc.scrapinghub.com/jobs.html
+ [docs]: http://www.rubydoc.info/github/abevoelker/scrapinghub-client/master/Scrapinghub/Jobs
+ [contracts]: https://github.com/egonSchiele/contracts.ruby/blob/master/TUTORIAL.md
+ [kleisli]: https://github.com/txus/kleisli
+ [kleisli-blog]: http://thoughts.codegram.com/cleaner-safer-ruby-api-clients-with-kleisli/
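
The README's `Try`/`Left`/`Right` convention deserves a slightly fuller illustration. Below is a minimal sketch (not part of the gem; the API key, project ID, and printed messages are placeholders) of consuming a `list` result with the same kleisli combinators the README example uses:

```ruby
require "scrapinghub-client"

jobs = Scrapinghub::Jobs.new(api_key: "abc123")

# list returns Right(response) on HTTP 200, Left(response) on an API
# failure, or a Try failure if an exception (e.g. a timeout) was raised.
result = jobs.list(project: 123, state: "finished", count: 5)

result
  .fmap { |r| puts "Success! Response body: #{r.inspect}" } # only runs on Right
  .or   { |f| puts "Call failed: #{f.inspect}" }            # runs on Left or a raised exception
```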
data/Rakefile ADDED
@@ -0,0 +1,22 @@
+ require "rspec"
+ require "rspec/core/rake_task"
+ require "rake/testtask"
+ require "bundler"
+
+ Rake::TestTask.new("spec:unit") do |t|
+   t.libs << ["lib", "spec"]
+   t.pattern = "spec/unit/**/*spec.rb"
+ end
+
+ RSpec::Core::RakeTask.new("spec:integration") do |t|
+   t.pattern = "spec/integration/**/*spec.rb"
+ end
+
+ Rake::TestTask.new("spec") do |t|
+   t.libs << ["lib", "spec"]
+   t.pattern = "spec/**/*spec.rb"
+ end
+
+ task :default => "spec"
+
+ Bundler::GemHelper.install_tasks
data/lib/scrapinghub-client.rb ADDED
@@ -0,0 +1,3 @@
+ require "scrapinghub/version"
+ require "scrapinghub/jobs"
+ ScrapingHub = Scrapinghub
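
Since `lib/scrapinghub-client.rb` aliases the constant, both capitalizations resolve to the same module — a trivial sketch:

```ruby
require "scrapinghub-client"

# ScrapingHub is assigned to Scrapinghub above, so the two are identical:
ScrapingHub::Jobs.equal?(Scrapinghub::Jobs) # => true
```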
data/lib/scrapinghub/jobs.rb ADDED
@@ -0,0 +1,175 @@
+ require "contracts"
+ require "kleisli"
+ require "httparty"
+
+ module Scrapinghub
+   class Jobs
+     include Contracts
+     include HTTParty
+     disable_rails_query_string_format
+     base_uri "dash.scrapinghub.com"
+
+     Contract ({ :api_key => String }) => Any
+     # Initialize a new Jobs API client
+     #
+     # @param api_key [String] Scrapinghub API key
+     #
+     # @return Object
+     def initialize(api_key:)
+       @api_key = api_key
+     end
+
+     Contract KeywordArgs[:project => Nat,
+                          :job => Optional[Or[String, ArrayOf[String]]],
+                          :spider => Optional[String],
+                          :state => Optional[Or["pending", "running", "finished"]],
+                          :has_tag => Optional[Or[String, ArrayOf[String]]],
+                          :lacks_tag => Optional[Or[String, ArrayOf[String]]],
+                          :count => Optional[Nat] ] => Or[Kleisli::Try, Kleisli::Either]
+     # Retrieve information about jobs.
+     #
+     # @param project [Fixnum] the project's numeric ID
+     # @param job [String, Array<String>] (optional) ID(s) of specific jobs to
+     #   retrieve
+     # @param spider [String] (optional) a spider name (only jobs belonging to
+     #   this spider will be returned)
+     # @param state [String] (optional) return only jobs with this state. Valid
+     #   values: "pending", "running", "finished"
+     # @param has_tag [String, Array<String>] (optional) return only jobs
+     #   containing the given tag(s)
+     # @param lacks_tag [String, Array<String>] (optional) return only jobs not
+     #   containing the given tag(s)
+     # @param count [Fixnum] (optional) maximum number of jobs to return
+     #
+     # @return [Kleisli::Try, Kleisli::Either] a Kleisli::Try if a low-level
+     #   exception is raised (e.g. the host is down), a Kleisli::Either::Left
+     #   if validation fails (e.g. bad authentication), or a
+     #   Kleisli::Either::Right if everything was successful.
+     def list(args)
+       options = { query: args, basic_auth: { username: @api_key } }
+       Try { self.class.get("/api/jobs/list.json", options) } >-> response {
+         if response.code == 200
+           Right(response)
+         else
+           Left(response)
+         end
+       }
+     end
+
+     Contract KeywordArgs[:project => Nat,
+                          :spider => String,
+                          :add_tag => Optional[Or[String, ArrayOf[String]]],
+                          :priority => Optional[Or[0, 1, 2, 3, 4]],
+                          :extra => Optional[HashOf[Symbol => String]] ] => Or[Kleisli::Try, Kleisli::Either]
+     # Schedule a job.
+     #
+     # @param project [Fixnum] the project's numeric ID
+     # @param spider [String] the spider name
+     # @param add_tag [String, Array<String>] (optional) add tag(s) to the job
+     # @param priority [Fixnum] (optional) set the job priority: possible values
+     #   range from 0 (lowest priority) to 4 (highest priority), default is 2
+     # @param extra [Hash] (optional) extra parameters passed as spider
+     #   arguments
+     #
+     # @return [Kleisli::Try, Kleisli::Either] a Kleisli::Try if a low-level
+     #   exception is raised (e.g. the host is down), a Kleisli::Either::Left
+     #   if validation fails (e.g. bad authentication), or a
+     #   Kleisli::Either::Right if everything was successful.
+     def schedule(args)
+       extra = args.delete(:extra) || {}
+       options = { body: args.merge(extra), basic_auth: { username: @api_key } }
+       Try { self.class.post("/api/schedule.json", options) } >-> response {
+         if response.code == 200
+           Right(response)
+         else
+           Left(response)
+         end
+       }
+     end
+
+     Contract KeywordArgs[:project => Nat,
+                          :job => Optional[Or[String, ArrayOf[String]]],
+                          :spider => Optional[String],
+                          :state => Optional[Or["pending", "running", "finished"]],
+                          :has_tag => Optional[Or[String, ArrayOf[String]]],
+                          :lacks_tag => Optional[Or[String, ArrayOf[String]]],
+                          :add_tag => Optional[Or[String, ArrayOf[String]]],
+                          :remove_tag => Optional[Or[String, ArrayOf[String]]] ] => Or[Kleisli::Try, Kleisli::Either]
+     # Update information about jobs.
+     #
+     # @param project [Fixnum] the project's numeric ID
+     # @param job [String, Array<String>] (optional) ID(s) of specific jobs to
+     #   update
+     # @param spider [String] (optional) query on spider name to update
+     # @param state [String] (optional) query on jobs with this state to update.
+     #   Valid values: "pending", "running", "finished"
+     # @param has_tag [String, Array<String>] (optional) query on jobs
+     #   containing the given tag(s) to update
+     # @param lacks_tag [String, Array<String>] (optional) query on jobs not
+     #   containing the given tag(s) to update
+     # @param add_tag [String, Array<String>] (optional) tag(s) to add to the
+     #   queried jobs
+     # @param remove_tag [String, Array<String>] (optional) tag(s) to remove
+     #   from the queried jobs
+     #
+     # @return [Kleisli::Try, Kleisli::Either] a Kleisli::Try if a low-level
+     #   exception is raised (e.g. the host is down), a Kleisli::Either::Left
+     #   if validation fails (e.g. bad authentication), or a
+     #   Kleisli::Either::Right if everything was successful.
+     def update(args)
+       options = { body: args, basic_auth: { username: @api_key } }
+       Try { self.class.post("/api/jobs/update.json", options) } >-> response {
+         if response.code == 200
+           Right(response)
+         else
+           Left(response)
+         end
+       }
+     end
+
+     Contract KeywordArgs[:project => Nat,
+                          :job => Or[String, ArrayOf[String]] ] => Or[Kleisli::Try, Kleisli::Either]
+     # Delete one or more jobs.
+     #
+     # @param project [Fixnum] the project's numeric ID
+     # @param job [String, Array<String>] the ID(s) of the specific job(s) to delete
+     #
+     # @return [Kleisli::Try, Kleisli::Either] a Kleisli::Try if a low-level
+     #   exception is raised (e.g. the host is down), a Kleisli::Either::Left
+     #   if validation fails (e.g. bad authentication), or a
+     #   Kleisli::Either::Right if everything was successful.
+     def delete(args)
+       options = { body: args, basic_auth: { username: @api_key } }
+       Try { self.class.post("/api/jobs/delete.json", options) } >-> response {
+         if response.code == 200
+           Right(response)
+         else
+           Left(response)
+         end
+       }
+     end
+
+     Contract KeywordArgs[:project => Nat,
+                          :job => String ] => Or[Kleisli::Try, Kleisli::Either]
+     # Stop a running job.
+     #
+     # @param project [Fixnum] the project's numeric ID
+     # @param job [String] the ID of the job to stop
+     #
+     # @return [Kleisli::Try, Kleisli::Either] a Kleisli::Try if a low-level
+     #   exception is raised (e.g. the host is down), a Kleisli::Either::Left
+     #   if validation fails (e.g. bad authentication), or a
+     #   Kleisli::Either::Right if everything was successful.
+     def stop(args)
+       options = { body: args, basic_auth: { username: @api_key } }
+       Try { self.class.post("/api/jobs/stop.json", options) } >-> response {
+         if response.code == 200
+           Right(response)
+         else
+           Left(response)
+         end
+       }
+     end
+
+   end
+ end
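
Because every method in `Jobs` returns a Kleisli value, calls compose with the same `>->` bind operator the implementation itself uses, and a `Left` or `Try` failure short-circuits the rest of the chain. A hypothetical sketch (project ID, spider name, tag, and API key are placeholders):

```ruby
require "scrapinghub-client"

jobs = Scrapinghub::Jobs.new(api_key: "abc123")

# Schedule a spider, then tag the resulting job; the update block only
# runs if schedule returned a Right.
result = jobs.schedule(project: 123, spider: "foo") >-> scheduled {
  jobs.update(project: 123, job: scheduled["jobid"], add_tag: "scheduled-via-api")
}

result
  .fmap { |_| puts "Job scheduled and tagged" }
  .or   { |f| puts "Chain aborted: #{f.inspect}" }
```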
data/lib/scrapinghub/version.rb ADDED
@@ -0,0 +1,3 @@
+ module Scrapinghub
+   VERSION = "0.0.1"
+ end
data/scrapinghub.gemspec ADDED
@@ -0,0 +1,26 @@
+ # -*- encoding: utf-8 -*-
+ require File.expand_path("../lib/scrapinghub/version", __FILE__)
+
+ Gem::Specification.new do |s|
+   s.name = "scrapinghub-client"
+   s.version = Scrapinghub::VERSION
+   s.authors = ["Abe Voelker"]
+   s.email = "abe@abevoelker.com"
+   s.homepage = "https://github.com/abevoelker/scrapinghub-client"
+   s.summary = %q{Ruby client for Scrapinghub API}
+   s.description = s.summary
+   s.license = "MIT"
+
+   s.require_paths = ["lib"]
+   s.files = `git ls-files`.split("\n")
+   s.test_files = `git ls-files -- {spec}/*`.split("\n")
+
+   s.add_dependency "contracts", "~> 0.11"
+   s.add_dependency "kleisli"
+   s.add_dependency "httparty"
+
+   s.add_development_dependency "rake"
+   s.add_development_dependency "rspec"
+   s.add_development_dependency "webmock"
+   s.add_development_dependency "vcr"
+ end
data/spec/fixtures/vcr_cassettes/jobs/delete/bad_auth.yml ADDED
@@ -0,0 +1,38 @@
+ ---
+ http_interactions:
+ - request:
+     method: post
+     uri: http://XXX:@dash.scrapinghub.com/api/jobs/delete.json
+     body:
+       encoding: UTF-8
+       string: job=1%2F1%2F7&project=1
+     headers:
+       Accept-Encoding:
+       - gzip;q=1.0,deflate;q=0.6,identity;q=0.3
+       Accept:
+       - "*/*"
+       User-Agent:
+       - Ruby
+   response:
+     status:
+       code: 403
+       message: FORBIDDEN
+     headers:
+       Server:
+       - nginx/1.8.0
+       Date:
+       - Sat, 25 Jul 2015 23:28:52 GMT
+       Content-Type:
+       - application/json
+       Transfer-Encoding:
+       - chunked
+       Connection:
+       - keep-alive
+       Vary:
+       - Cookie
+     body:
+       encoding: ASCII-8BIT
+       string: '{"status": "error", "message": "Authentication failed"}'
+     http_version:
+   recorded_at: Sat, 25 Jul 2015 23:28:52 GMT
+ recorded_with: VCR 2.9.3
data/spec/fixtures/vcr_cassettes/jobs/delete/job/invalid.yml ADDED
@@ -0,0 +1,38 @@
+ ---
+ http_interactions:
+ - request:
+     method: post
+     uri: http://XXX:@dash.scrapinghub.com/api/jobs/delete.json
+     body:
+       encoding: UTF-8
+       string: job=1%2F1%2F123&project=1
+     headers:
+       Accept-Encoding:
+       - gzip;q=1.0,deflate;q=0.6,identity;q=0.3
+       Accept:
+       - "*/*"
+       User-Agent:
+       - Ruby
+   response:
+     status:
+       code: 200
+       message: OK
+     headers:
+       Server:
+       - nginx/1.8.0
+       Date:
+       - Sat, 25 Jul 2015 23:26:42 GMT
+       Content-Type:
+       - application/json
+       Transfer-Encoding:
+       - chunked
+       Connection:
+       - keep-alive
+       Vary:
+       - Cookie
+     body:
+       encoding: ASCII-8BIT
+       string: '{"status": "ok", "count": 0}'
+     http_version:
+   recorded_at: Sat, 25 Jul 2015 23:26:43 GMT
+ recorded_with: VCR 2.9.3
data/spec/fixtures/vcr_cassettes/jobs/delete/job/multiple.yml ADDED
@@ -0,0 +1,38 @@
+ ---
+ http_interactions:
+ - request:
+     method: post
+     uri: http://XXX:@dash.scrapinghub.com/api/jobs/delete.json
+     body:
+       encoding: US-ASCII
+       string: job=1%2F1%2F7&job=1%2F1%2F8&project=1
+     headers:
+       Accept-Encoding:
+       - gzip;q=1.0,deflate;q=0.6,identity;q=0.3
+       Accept:
+       - "*/*"
+       User-Agent:
+       - Ruby
+   response:
+     status:
+       code: 200
+       message: OK
+     headers:
+       Server:
+       - nginx/1.8.0
+       Date:
+       - Sat, 25 Jul 2015 23:24:28 GMT
+       Content-Type:
+       - application/json
+       Transfer-Encoding:
+       - chunked
+       Connection:
+       - keep-alive
+       Vary:
+       - Cookie
+     body:
+       encoding: ASCII-8BIT
+       string: '{"status": "ok", "count": 2}'
+     http_version:
+   recorded_at: Sat, 25 Jul 2015 23:24:28 GMT
+ recorded_with: VCR 2.9.3
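
The cassettes above are replayed by the integration specs. As a rough sketch of the mechanics (illustrative only, not the gem's actual spec code), VCR stubs the HTTP layer so, for example, the recorded 403 in `jobs/delete/bad_auth` surfaces as a `Left`:

```ruby
require "vcr"
require "scrapinghub-client"

VCR.configure do |c|
  c.cassette_library_dir = "spec/fixtures/vcr_cassettes"
  c.hook_into :webmock
end

VCR.use_cassette("jobs/delete/bad_auth") do
  jobs = Scrapinghub::Jobs.new(api_key: "XXX")
  result = jobs.delete(project: 1, job: "1/1/7")
  result.class # => Kleisli::Either::Left (the recorded response was a 403)
end
```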