scrapinghub 0.0.2 → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/Gemfile.lock +16 -0
- data/README.md +16 -3
- data/Rakefile +4 -0
- data/lib/scrapinghub/api_method.rb +1 -1
- data/lib/scrapinghub/api_response.rb +2 -0
- data/lib/scrapinghub/scrapinghub.rb +35 -12
- data/lib/scrapinghub/version.rb +1 -1
- data/scrapinghub.gemspec +5 -3
- metadata +28 -12
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: efb25630cbfa2300e582aa732ee45ede8ce2fd0f
|
4
|
+
data.tar.gz: 62d00953d56f4e03265f7e60b0f2d83cf21101b6
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: bc3b458a543a3864daac2de070bae64a056829ee2e369e41de22e4d50404550723c6558ae65c8e82305b5495a4a14c7b2acf95e1b7f5525603c39e573c86ad84
|
7
|
+
data.tar.gz: d2ad323f2381719e169deb53de92d49791a281473b1175aec66f07f8f6059d7f36dab13f6399e95dd7ae068f55e2a56a4a3a6f2a0facdedc903b6518324f3256
|
data/Gemfile.lock
ADDED
data/README.md
CHANGED
@@ -1,7 +1,5 @@
|
|
1
1
|
# Scrapinghub
|
2
2
|
|
3
|
-
TODO: Write a gem description
|
4
|
-
|
5
3
|
## Installation
|
6
4
|
|
7
5
|
Add this line to your application's Gemfile:
|
@@ -16,9 +14,23 @@ Or install it yourself as:
|
|
16
14
|
|
17
15
|
$ gem install scrapinghub
|
18
16
|
|
17
|
+
|
19
18
|
## Usage
|
20
19
|
|
21
|
-
|
20
|
+
Get Job Items
|
21
|
+
|
22
|
+
```ruby
|
23
|
+
Scrapinghub::Scrapinghub.new("SCRAPING HUB API KEY").job_items(project: project_id, job: job_id)
|
24
|
+
```
|
25
|
+
|
26
|
+
##### Available methods:
|
27
|
+
|
28
|
+
* projects
|
29
|
+
* spiders - required param: { project: project_id }
|
30
|
+
* jobs - required param: { project: project_id }
|
31
|
+
* job - required param: { project: project_id, job: job_id }
|
32
|
+
* job_items - required param: { project: project_id, job: job_id }
|
33
|
+
* spider_items - required param: { project: project_id, spider: spider_id }
|
22
34
|
|
23
35
|
## Contributing
|
24
36
|
|
@@ -27,3 +39,4 @@ TODO: Write usage instructions here
|
|
27
39
|
3. Commit your changes (`git commit -am 'Add some feature'`)
|
28
40
|
4. Push to the branch (`git push origin feature/my-new-feature`)
|
29
41
|
5. Create new Pull Request
|
42
|
+
|
data/lib/scrapinghub/scrapinghub.rb
CHANGED
@@ -3,19 +3,21 @@ require_relative 'api_method'
|
|
3
3
|
module Scrapinghub
|
4
4
|
class Scrapinghub
|
5
5
|
METHODS = {
|
6
|
-
projects: ApiMethod.new('scrapyd/listprojects', []),
|
7
|
-
spiders: ApiMethod.new('spiders/list', [:project]),
|
8
|
-
jobs: ApiMethod.new('jobs/list', [:project]),
|
9
|
-
job: ApiMethod.new('jobs/list', [:project, :job]),
|
10
|
-
job_items: ApiMethod.new('items', [:project, :job]),
|
11
|
-
spider_items: ApiMethod.new('items', [:project, :spider])
|
6
|
+
projects: { http_type: :get, method: ApiMethod.new('scrapyd/listprojects', []) },
|
7
|
+
spiders: { http_type: :get, method: ApiMethod.new('spiders/list', [:project]) },
|
8
|
+
jobs: { http_type: :get, method: ApiMethod.new('jobs/list', [:project]) },
|
9
|
+
job: { http_type: :get, method: ApiMethod.new('jobs/list', [:project, :job]) },
|
10
|
+
job_items: { http_type: :get, method: ApiMethod.new('items', [:project, :job]) },
|
11
|
+
spider_items: { http_type: :get, method: ApiMethod.new('items', [:project, :spider]) },
|
12
|
+
schedule: { http_type: :post, method: ApiMethod.new('schedule', [:project, :spider]) },
|
12
13
|
}
|
13
14
|
|
14
15
|
attr_reader :api_key
|
15
16
|
|
16
|
-
def initialize(api_key, url='
|
17
|
+
def initialize(api_key, url='https://dash.scrapinghub.com/api/', debug_mode=false)
|
17
18
|
@api_key = api_key
|
18
19
|
@base_url = url
|
20
|
+
@debug_mode = debug_mode
|
19
21
|
end
|
20
22
|
|
21
23
|
def get(method, parameters = {})
|
@@ -28,12 +30,31 @@ module Scrapinghub
|
|
28
30
|
fetch(uri)
|
29
31
|
end
|
30
32
|
|
33
|
+
def post(method, parameters = {})
|
34
|
+
if method.is_a? Symbol
|
35
|
+
uri = get_method_url(method, parameters)
|
36
|
+
else
|
37
|
+
uri = build_url(method, parameters)
|
38
|
+
end
|
39
|
+
|
40
|
+
Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
|
41
|
+
request = Net::HTTP::Post.new(uri.request_uri)
|
42
|
+
request.basic_auth @api_key, ''
|
43
|
+
|
44
|
+
request.add_field('Content-Type', 'application/json')
|
45
|
+
request.set_form_data(parameters)
|
46
|
+
response = http.request(request)
|
47
|
+
|
48
|
+
ApiResponse.new(response)
|
49
|
+
end
|
50
|
+
end
|
51
|
+
|
31
52
|
def fetch(uri, redirect_limit = 5)
|
32
53
|
if redirect_limit <= 0
|
33
54
|
raise "Request redirected too many times."
|
34
55
|
end
|
35
56
|
|
36
|
-
Net::HTTP.start(uri.host, uri.port) do |http|
|
57
|
+
Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
|
37
58
|
request = Net::HTTP::Get.new(uri.request_uri)
|
38
59
|
request.basic_auth @api_key, ''
|
39
60
|
|
@@ -43,8 +64,10 @@ module Scrapinghub
|
|
43
64
|
when Net::HTTPFound
|
44
65
|
new_location = URI(response['location'])
|
45
66
|
|
46
|
-
|
47
|
-
|
67
|
+
if @debug_mode
|
68
|
+
debug "<- #{uri.request_uri} redirects to",
|
69
|
+
"-> #{new_location.request_uri}"
|
70
|
+
end
|
48
71
|
|
49
72
|
fetch(new_location, redirect_limit-1)
|
50
73
|
else
|
@@ -55,7 +78,7 @@ module Scrapinghub
|
|
55
78
|
|
56
79
|
def method_missing(method, *args, &block)
|
57
80
|
if METHODS[method]
|
58
|
-
|
81
|
+
self.public_send(METHODS[method][:http_type], method, *args)
|
59
82
|
else
|
60
83
|
super
|
61
84
|
end
|
@@ -67,7 +90,7 @@ module Scrapinghub
|
|
67
90
|
end
|
68
91
|
|
69
92
|
def get_method_url(method, parameters)
|
70
|
-
api_method = METHODS[method]
|
93
|
+
api_method = METHODS[method][:method]
|
71
94
|
|
72
95
|
return build_url(api_method, parameters)
|
73
96
|
end
|
data/lib/scrapinghub/version.rb
CHANGED
data/scrapinghub.gemspec
CHANGED
@@ -6,14 +6,16 @@ require 'scrapinghub/version'
|
|
6
6
|
Gem::Specification.new do |gem|
|
7
7
|
gem.name = "scrapinghub"
|
8
8
|
gem.version = Scrapinghub::VERSION
|
9
|
-
gem.authors = ["Emil Ahlback"]
|
10
|
-
gem.email = ["e.ahlback@gmail.com"]
|
9
|
+
gem.authors = ["Emil Ahlback", "Chien Kuo"]
|
10
|
+
gem.email = ["e.ahlback@gmail.com", "chien.cc.kuo@gmail.com"]
|
11
11
|
gem.description = %q{Simple interface to Scrapinghub's API}
|
12
12
|
gem.summary = %q{Just a Ruby wrapper for the Scrapinghub API, see docs at: http://help.scrapinghub.com/api.html}
|
13
|
-
gem.homepage = "https://github.com/
|
13
|
+
gem.homepage = "https://github.com/chien/scrapinghub"
|
14
14
|
|
15
15
|
gem.files = `git ls-files`.split($/)
|
16
16
|
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
|
17
17
|
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
|
18
18
|
gem.require_paths = ["lib"]
|
19
|
+
|
20
|
+
gem.add_runtime_dependency 'json'
|
19
21
|
end
|
metadata
CHANGED
@@ -1,25 +1,41 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: scrapinghub
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.0.
|
5
|
-
prerelease:
|
4
|
+
version: 0.0.3
|
6
5
|
platform: ruby
|
7
6
|
authors:
|
8
7
|
- Emil Ahlback
|
8
|
+
- Chien Kuo
|
9
9
|
autorequire:
|
10
10
|
bindir: bin
|
11
11
|
cert_chain: []
|
12
|
-
date:
|
13
|
-
dependencies:
|
12
|
+
date: 2016-05-17 00:00:00.000000000 Z
|
13
|
+
dependencies:
|
14
|
+
- !ruby/object:Gem::Dependency
|
15
|
+
name: json
|
16
|
+
requirement: !ruby/object:Gem::Requirement
|
17
|
+
requirements:
|
18
|
+
- - '>='
|
19
|
+
- !ruby/object:Gem::Version
|
20
|
+
version: '0'
|
21
|
+
type: :runtime
|
22
|
+
prerelease: false
|
23
|
+
version_requirements: !ruby/object:Gem::Requirement
|
24
|
+
requirements:
|
25
|
+
- - '>='
|
26
|
+
- !ruby/object:Gem::Version
|
27
|
+
version: '0'
|
14
28
|
description: Simple interface to Scrapinghub's API
|
15
29
|
email:
|
16
30
|
- e.ahlback@gmail.com
|
31
|
+
- chien.cc.kuo@gmail.com
|
17
32
|
executables: []
|
18
33
|
extensions: []
|
19
34
|
extra_rdoc_files: []
|
20
35
|
files:
|
21
36
|
- .gitignore
|
22
37
|
- Gemfile
|
38
|
+
- Gemfile.lock
|
23
39
|
- LICENSE.txt
|
24
40
|
- README.md
|
25
41
|
- Rakefile
|
@@ -29,28 +45,28 @@ files:
|
|
29
45
|
- lib/scrapinghub/scrapinghub.rb
|
30
46
|
- lib/scrapinghub/version.rb
|
31
47
|
- scrapinghub.gemspec
|
32
|
-
homepage: https://github.com/
|
48
|
+
homepage: https://github.com/chien/scrapinghub
|
33
49
|
licenses: []
|
50
|
+
metadata: {}
|
34
51
|
post_install_message:
|
35
52
|
rdoc_options: []
|
36
53
|
require_paths:
|
37
54
|
- lib
|
38
55
|
required_ruby_version: !ruby/object:Gem::Requirement
|
39
|
-
none: false
|
40
56
|
requirements:
|
41
|
-
- -
|
57
|
+
- - '>='
|
42
58
|
- !ruby/object:Gem::Version
|
43
59
|
version: '0'
|
44
60
|
required_rubygems_version: !ruby/object:Gem::Requirement
|
45
|
-
none: false
|
46
61
|
requirements:
|
47
|
-
- -
|
62
|
+
- - '>='
|
48
63
|
- !ruby/object:Gem::Version
|
49
64
|
version: '0'
|
50
65
|
requirements: []
|
51
66
|
rubyforge_project:
|
52
|
-
rubygems_version:
|
67
|
+
rubygems_version: 2.0.3
|
53
68
|
signing_key:
|
54
|
-
specification_version:
|
55
|
-
summary:
|
69
|
+
specification_version: 4
|
70
|
+
summary: 'Just a Ruby wrapper for the Scrapinghub API, see docs at: http://help.scrapinghub.com/api.html'
|
56
71
|
test_files: []
|
72
|
+
has_rdoc:
|