robotstxt-parser 0.1.0 → 0.1.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/lib/robotstxt/getter.rb +1 -1
- data/robotstxt.gemspec +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: fec9087c0cf406df79a6188389c9c245b0cb0db4
|
4
|
+
data.tar.gz: 07e62e35eb1a7ed8d132bf0344b5d7a79e7825a4
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 8c9a07082970793cfa87df01dc965cd1dec8754f9fc09a698a5ebef8b9e2fdd6e100f8a64c68100c4dfa98cbbff61eafd0b753873858c0de75d98ba80b36e513
|
7
|
+
data.tar.gz: 447d4dec8e7f0aee6b07e94210116a7252160a57b69f97a399464639e0e8274e1b7b7cfbc4b536dd5c70f29c1edec31614ebbc3fa23dd61c6f51ed054cb0f83c
|
data/lib/robotstxt/getter.rb
CHANGED
@@ -69,7 +69,7 @@ module Robotstxt
|
|
69
69
|
# In the case that we can't decode, Ruby's laissez faire attitude to encoding
|
70
70
|
# should mean that we have a reasonable chance of working anyway.
|
71
71
|
def decode_body(response)
|
72
|
-
return
|
72
|
+
return "" if response.body.blank? || response.body.nil?
|
73
73
|
Robotstxt.ultimate_scrubber(response.body)
|
74
74
|
end
|
75
75
|
|
data/robotstxt.gemspec
CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path("../lib", __FILE__)
|
|
3
3
|
|
4
4
|
Gem::Specification.new do |gem|
|
5
5
|
gem.name = "robotstxt-parser"
|
6
|
-
gem.version = "0.1.0"
|
6
|
+
gem.version = "0.1.1"
|
7
7
|
gem.authors = ["Garen Torikian"]
|
8
8
|
gem.email = ["gjtorikian@gmail.com"]
|
9
9
|
gem.description = %q{Robotstxt-Parser allows you to the check the accessibility of URLs and get other data. Full support for the robots.txt RFC, wildcards and Sitemap: rules.}
|