http_crawler 0.2.0 → 0.2.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: f034023a8c50c41be3d4e423d39fa2ad44e75930
-  data.tar.gz: 745c8a86a328387f8b8c7ef68e351c5d63d4d61a
+  metadata.gz: 51fa116a088c7a5d065b9dc4ed70185f85102ed6
+  data.tar.gz: 711b59b48c0c782d24ffed5d76d88c82e011d17b
 SHA512:
-  metadata.gz: aab1febfdc72a126e9edb1b496b661d236d3ae6689d5bf29350d4bffdbb4e9551e03487c2a7fc01d6ec7f15ff55d0872db8f90fb791474834982168aca88e531
-  data.tar.gz: 317de9a4ef0d5423b57de20cb9220cdecd598d431d67f03b025fd05c0a2ce2d013eec4e3da816ce50a35722c161aef01a90ee376add0e0ebc06c779b0e296370
+  metadata.gz: ff1e9c6f6b7734ffadfd5a0c33a1412b4542445a9d86840847885269b36a764faa69349c313d8ed085493dca8544a77609577f595cad884564bca4164064d991
+  data.tar.gz: 94dc18a2b253e9a2fda57b51067b191d49ea88df04128c3f7bfb88745c99afe33d9dc64bdde7fb79f7b58a3cab2b2cbf08d6bf2d298b316e488cc439bb6481e3
@@ -33,7 +33,7 @@ module HttpCrawler
     #
     def initialize
       raise "Client uri为空" unless init_uri
-      @http = Crawler::HTTP.new(uri.host, uri.port)
+      @http = HttpCrawler::HTTP.new(uri.host, uri.port)
 
       @http.use_ssl = (uri.scheme == "https")
 
@@ -6,7 +6,7 @@ module HttpCrawler
 
     # Whether to fetch a proxy automatically: true fetches automatically, false does not
     attr_accessor :auto_proxy
-    # Alias of the proxy API; maps to the proxy APIs maintained in Crawler::Proxy
+    # Alias of the proxy API; maps to the proxy APIs maintained in HttpCrawler::Proxy
     attr_accessor :proxy_api
    # Key required to call your own proxy pool
    attr_accessor :proxy_key
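
The accessors above configure how a client obtains proxies. A minimal usage sketch, assuming a concrete client class such as HttpCrawler::Web::Baidu::Client with a plain constructor (neither the class choice nor its constructor is confirmed by this diff):

    # Hypothetical usage; the client class and its constructor are assumptions.
    client = HttpCrawler::Web::Baidu::Client.new
    client.auto_proxy = true              # fetch proxies automatically
    client.proxy_api  = "test_proxy_api"  # proxy API alias resolved via HttpCrawler::Proxy.for
    client.proxy_key  = "my_pool_key"     # key for the caller's own proxy pool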
@@ -58,7 +58,7 @@ module HttpCrawler
 
       while @@proxy_list.blank?
         Rails.logger.debug("@@proxy_list 为空进行更新")
-        proxy_client = Crawler::Proxy.for(proxy_api)
+        proxy_client = HttpCrawler::Proxy.for(proxy_api)
         proxy_r = proxy_client.get_proxy(key: proxy_key)
         @@proxy_list << proxy_r.parsing
         Rails.logger.debug("@@proxy_list => #{@@proxy_list}")
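
The loop above documents the contract the renamed proxy clients satisfy: for builds a client, get_proxy performs the request, and parsing extracts the proxy entries from the response. The same flow in isolation, as a sketch (the proxy API name and key here are placeholder values):

    # Sketch of the proxy-refresh flow shown in the loop above.
    proxy_client = HttpCrawler::Proxy.for("test_proxy_api")  # factory, see Proxy.for below
    proxy_r = proxy_client.get_proxy(key: "example_key")     # HTTP call to the proxy API
    proxies = proxy_r.parsing                                # response module supplies #parsing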
@@ -3,8 +3,8 @@ module HttpCrawler
   module TestProxyApi
     class Client
 
-      include(Crawler::Client)
-      include(Crawler::Proxy::Client)
+      include(HttpCrawler::Client)
+      include(HttpCrawler::Proxy::Client)
 
       class << self
         def new(*args)
@@ -19,11 +19,11 @@ module HttpCrawler
       # http://39.108.59.38:7772/Tools/proxyIP.ashx?OrderNumber=ccd4c8912691f28861a1ed048fec88dc&poolIndex=22717&cache=1&qty=2
       def get_proxy(parameter = {})
         r = http.get_fetch("/api/get_proxy")
-        r.extend(Crawler::Proxy::Laofu::Response::GetProxy)
+        r.extend(HttpCrawler::Proxy::Laofu::Response::GetProxy)
       end
 
     end
   end # module BiQuGe_DuQuanBen
   end # module Web
-end # module Crawler
+end # module HttpCrawler
 
@@ -19,6 +19,6 @@ module HttpCrawler
     end # module Response
   end # module Laofu
   end # module Proxy
-end # module Crawler
+end # module HttpCrawler
 
 
@@ -4,11 +4,11 @@ module HttpCrawler
     class << self
 
       # Accepted format:
-      # web_name = "feilong"
-      # Returns a Crawler::Proxy::Feilong::Client instance
+      # web_name = "test_proxy_api"
+      # Returns an HttpCrawler::Proxy::TestProxyApi::Client instance
       #
       def for(web_name, *arg)
-        "Crawler::Proxy::#{web_name.camelize}::Client".constantize.new(*arg)
+        "HttpCrawler::Proxy::#{web_name.camelize}::Client".constantize.new(*arg)
       end
 
     end
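
The factory above camelizes the alias and constantizes the resulting class path, so after the rename the comment's example resolves as follows (this assumes ActiveSupport's camelize/constantize are loaded, which the implementation already relies on):

    HttpCrawler::Proxy.for("test_proxy_api")
    # builds the string "HttpCrawler::Proxy::TestProxyApi::Client",
    # constantizes it, and returns a new instance of that client class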
@@ -1,3 +1,3 @@
 module HttpCrawler
-  VERSION = "0.2.0"
+  VERSION = "0.2.1"
 end
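
Consumers that want the corrected HttpCrawler namespace need at least this release; one reasonable Gemfile pin (the constraint itself is only a suggestion):

    # Gemfile
    gem "http_crawler", "~> 0.2.1"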
@@ -2,7 +2,7 @@ module HttpCrawler
   module Web
     module Baidu
       class Client
-        include(Crawler::Client)
+        include(HttpCrawler::Client)
 
         def init_http
           @http.open_timeout = 3
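
The Baidu client above shows the pattern any site-specific client follows after the rename: include HttpCrawler::Client and tune the HTTP object in init_http. A hedged sketch of a new client in that style; the Example module, the URL, and the init_uri contract are assumptions inferred from the initialize hunk earlier in this diff:

    require "uri"

    module HttpCrawler
      module Web
        module Example                 # hypothetical site module
          class Client
            include(HttpCrawler::Client)

            # Assumed hook: initialize raises unless init_uri returns something truthy.
            def init_uri
              @uri = URI("https://www.example.com")
            end

            def init_http
              @http.open_timeout = 3   # same tuning point as the Baidu client
            end
          end
        end
      end
    end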
@@ -21,5 +21,5 @@ module HttpCrawler
     end
   end # module Baidu
   end # module Web
-end # module Crawler
+end # module HttpCrawler
 
@@ -11,6 +11,6 @@ module HttpCrawler
     end # module Response
   end # module Qichacha
   end # module Web
-end # module Crawler
+end # module HttpCrawler
 
 
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: http_crawler
 version: !ruby/object:Gem::Version
-  version: 0.2.0
+  version: 0.2.1
 platform: ruby
 authors:
 - jagger