http_crawler 0.3.1.13 → 0.3.1.14

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b691ef837786382eeaae3b6368bbd6709ddc2d4fbee03fac819fa6e691f89e03
- data.tar.gz: c5e94dd28381d78dd2798dc74f9d83498fa33e2c173b7edc62f7bd73a8692748
+ metadata.gz: 2d03aea734e2bfd211fc79119ff71ecd06de362ad7d056fa536f2c8484f54a37
+ data.tar.gz: 9f6f1b5af8f5ecaa8b4eb5a176ee7515388a66f00a754d92477113bfdfef0f02
  SHA512:
- metadata.gz: e05f70823be3fb9e88768f508f726c1d2769fd255df1eca0007748854deb4b123623cc660b034cee72273014effbde05a860b398b87e3fad7a2d794c63adb2e1
- data.tar.gz: ce6b3a0f35a2e70adf9a6747a2808ebe066d35c988a186e910979353553dd882ae3599285cf73e6bffcf4f7688b37f43df27410e693e68cf7700f7eccfcb21e6
+ metadata.gz: c1882c7ac5b646a8ebbf034e6e384fbb527a3ac795adcd5c185d5532a66733a7a17fab3556883970754e19f0814f8adeafba3950bc93d55d7fda0178afca5f09
+ data.tar.gz: 92146c75ba8f3097651aa0d379a8c4d19249ea588791880c4b6a5580a25040e0e51a1fc99c7df30ea9648616c5e729fafa21daaa5b09552767c78051a7cea3ae
@@ -27,6 +27,36 @@ module HttpCrawler
  end
  end
 
+ #
+ # init_uri raises an error if @uri has not been initialized
+ # subclasses are expected to redefine init_uri
+ #
+ def initialize(parameter = {})
+   parameter = parameter.symbolize_keys
+
+   parameter[:uri_or_path] = parameter[:url] || parameter[:uri]
+
+   if parameter[:uri_or_path]
+     # a custom uri was passed in
+     raise "Client uri was already initialized" if uri
+     update_uri(parameter[:uri_or_path])
+   else
+     # initialize the uri
+     init_uri
+   end
+
+   # initialize the timeout
+   init_timeout
+
+   # initialize the ssl protocol
+   init_ssl unless uri.blank?
+
+   # initialize client-specific parameters
+   init_client
+
+   # initialize the proxy parameters
+   @proxy_params = {key: "#{self.class.to_s.gsub(":","_")}"}
+ end
 
  attr_accessor :max_error_num
  # maximum number of retries on error
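
With this change the constructor accepts an explicit url: (or uri:) keyword and only falls back to the subclass's init_uri when neither is given; init_ssl is likewise skipped when no uri was resolved. A minimal sketch of both paths, assuming a hypothetical MyClient subclass (the init_uri body here is illustrative, not from the gem):

    class MyClient < HttpCrawler::Client
      # fallback used only when no url:/uri: parameter is passed
      def init_uri
        update_uri("http://example.com/")
      end
    end

    client  = MyClient.new(url: "http://example.org/")  # uses the explicit url
    default = MyClient.new                              # falls back to init_uri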
@@ -238,33 +268,6 @@ module HttpCrawler
  h
  end
 
- #
- # init_uri raises an error if @uri has not been initialized
- # subclasses are expected to redefine init_uri
- #
- def initialize(parameter = {})
-   parameter = parameter.symbolize_keys
-   # initialize the uri
-   init_uri
-
-   # if a custom uri was passed in
-   if parameter[:uri]
-     raise "Client uri was already initialized" if uri
-     update_uri(parameter[:uri])
-   end
-
-   # initialize the timeout
-   init_timeout
-
-   # initialize the ssl protocol
-   init_ssl
-
-   # initialize client-specific parameters
-   init_client
-
-   # initialize the proxy parameters
-   @proxy_params = {key: "#{self.class.to_s.gsub(":","_")}"}
- end
 
  # send a GET request
  def get(path, params = {})
@@ -0,0 +1,9 @@
+ module HttpCrawler
+   module Decryption
+     class << self
+       def node(path)
+         HttpCrawler::Decryption::Node.decryption(path)
+       end
+     end
+   end
+ end
@@ -0,0 +1 @@
+ # decryption (cracking) module
@@ -0,0 +1,22 @@
+ module HttpCrawler
+   module Decryption
+     module Node
+
+       # def method_missing(methodname, *args)
+       #   http_crawler = HttpCrawler::Client.new(url: "http://127.0.0.1:8080/");
+       #   r = http_crawler.get("/#{self.to_s.gsub("HttpCrawler::Decryption::Node::", "").underscore}/#{methodname}.js");
+       #   r.dec
+       # end
+
+       class << self
+         def decryption(path)
+           # append a ".js" extension unless the path already ends with one
+           path = path + ".js" unless path =~ /\.js$/
+           # p path
+           # fetch the script from the local decryption service and return the decrypted body
+           http_crawler = HttpCrawler::Client.new(url: "http://127.0.0.1:8080/")
+           r = http_crawler.get(path)
+           r.dec
+         end
+       end
+     end
+   end
+ end
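
HttpCrawler::Decryption.node(path) is a thin facade over Node.decryption, which appends ".js" to the path if missing, fetches that script from a decryption service expected at http://127.0.0.1:8080/, and returns the decrypted body via r.dec. A usage sketch (the "some_site/sign" path is hypothetical, and a compatible service must already be listening on port 8080):

    require "http_crawler"

    # requests http://127.0.0.1:8080/some_site/sign.js and returns the decrypted result
    result = HttpCrawler::Decryption.node("some_site/sign")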
@@ -1,3 +1,3 @@
  module HttpCrawler
-   VERSION = "0.3.1.13"
+   VERSION = "0.3.1.14"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: http_crawler
  version: !ruby/object:Gem::Version
-   version: 0.3.1.13
+   version: 0.3.1.14
  platform: ruby
  authors:
  - jagger
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2019-04-19 00:00:00.000000000 Z
+ date: 2019-04-24 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: rspec
@@ -137,6 +137,9 @@ files:
  - lib/http_crawler/common/integer.rb
  - lib/http_crawler/common/object.rb
  - lib/http_crawler/common/string.rb
+ - lib/http_crawler/decryption.rb
+ - lib/http_crawler/decryption/README.md
+ - lib/http_crawler/decryption/node.rb
  - lib/http_crawler/errors.rb
  - lib/http_crawler/http/response.rb
  - lib/http_crawler/proxy.rb