webrobots 0.0.7 → 0.0.8

data/VERSION CHANGED
@@ -1 +1 @@
-0.0.7
+0.0.8
data/lib/webrobots.rb CHANGED
@@ -41,8 +41,9 @@ class WebRobots
   # a relative URI or a non-HTTP/HTTPS URI is given, ArgumentError is
   # raised.
   def allowed?(url)
-    robots_txt, request_uri = evaluate(url)
+    site, request_uri = split_uri(url)
     return true if request_uri == '/robots.txt'
+    robots_txt = get_robots_txt(site)
     robots_txt.allow?(request_uri)
   end
 
@@ -115,11 +116,6 @@ class WebRobots
     return site, request_uri
   end
 
-  def evaluate(url)
-    site, request_uri = split_uri(url)
-    return get_robots_txt(site), request_uri
-  end
-
   def robots_txt_for(url)
     site, = split_uri(url)
     get_robots_txt(site)
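The change to allowed? above inlines the old evaluate helper so that the URL is split first, the /robots.txt shortcut is taken before anything is fetched, and robots.txt is only retrieved for other paths. A minimal usage sketch of the method as documented; the user-agent name and example URLs are invented for illustration:

require 'webrobots'
require 'uri'

robots = WebRobots.new('MyBot/1.0')  # hypothetical user-agent name

# /robots.txt itself is always allowed, and is now answered without
# fetching the site's robots.txt first
robots.allowed?(URI.parse('http://example.com/robots.txt'))  #=> true

# any other HTTP/HTTPS URL is checked against the site's robots.txt;
# a relative or non-HTTP/HTTPS URI raises ArgumentError
robots.allowed?(URI.parse('http://example.com/some/page'))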
data/test/test_webrobots.rb CHANGED
@@ -517,4 +517,32 @@ Disallow: /
       assert @doc.meta_robots('googlebot').include?('noarchive')
     end
   end
+
+  class Agent
+    def initialize
+      @robots = WebRobots.new 'agent', :http_get => method(:get)
+    end
+
+    def get uri
+      @robots.allowed? uri
+
+      if uri.request_uri == '/robots.txt' then
+        ''
+      else
+        'content'
+      end
+    end
+  end
+
+  context "embedded in a user-agent" do
+    setup do
+      @agent = Agent.new
+    end
+
+    should "fetch robots.txt" do
+      body = @agent.get URI.parse 'http://example/robots.html'
+
+      assert_equal 'content', body
+    end
+  end
 end
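The new test above shows the embedding pattern this release enables: a user-agent passes its own fetch method to WebRobots via the :http_get option and calls allowed? from inside that method. Because allowed? now answers /robots.txt before asking :http_get for anything, the callback no longer recurses when the library fetches robots.txt through it. A rough sketch of the same pattern outside the test suite; the class name, agent string, and Net::HTTP-based fetching are assumptions, not part of the gem:

require 'webrobots'
require 'net/http'
require 'uri'

class PoliteFetcher
  def initialize
    # hand our own fetcher to WebRobots; 'PoliteFetcher/1.0' is illustrative
    @robots = WebRobots.new('PoliteFetcher/1.0', :http_get => method(:get))
  end

  def get(uri)
    # Checking permission here is safe: allowed? returns true for
    # /robots.txt before it asks :http_get to fetch anything.
    raise "disallowed by robots.txt: #{uri}" unless @robots.allowed?(uri)
    Net::HTTP.get(uri)
  end
end

fetcher = PoliteFetcher.new
puts fetcher.get(URI.parse('http://example.com/'))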
data/webrobots.gemspec CHANGED
@@ -5,11 +5,11 @@
 
 Gem::Specification.new do |s|
   s.name = %q{webrobots}
-  s.version = "0.0.7"
+  s.version = "0.0.8"
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
   s.authors = ["Akinori MUSHA"]
-  s.date = %q{2011-02-01}
+  s.date = %q{2011-04-11}
   s.description = %q{This library helps write robots.txt compliant web robots in Ruby.
 }
   s.email = %q{knu@idaemons.org}
@@ -35,7 +35,7 @@ Gem::Specification.new do |s|
   ]
   s.licenses = ["2-clause BSDL"]
   s.require_paths = ["lib"]
-  s.rubygems_version = %q{1.4.2}
+  s.rubygems_version = %q{1.6.2}
   s.summary = %q{A Ruby library to help write robots.txt compliant web robots}
   s.test_files = [
     "test/helper.rb",
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: webrobots
 version: !ruby/object:Gem::Version
-  hash: 17
+  hash: 15
   prerelease:
   segments:
   - 0
  - 0
-  - 7
-  version: 0.0.7
+  - 8
+  version: 0.0.8
 platform: ruby
 authors:
 - Akinori MUSHA
@@ -15,13 +15,11 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2011-02-01 00:00:00 +09:00
+date: 2011-04-11 00:00:00 +09:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
-  prerelease: false
   name: racc
-  type: :runtime
   version_requirements: &id001 !ruby/object:Gem::Requirement
     none: false
     requirements:
@@ -31,11 +29,11 @@ dependencies:
         segments:
         - 0
         version: "0"
+  prerelease: false
+  type: :runtime
   requirement: *id001
 - !ruby/object:Gem::Dependency
-  prerelease: false
   name: nokogiri
-  type: :runtime
   version_requirements: &id002 !ruby/object:Gem::Requirement
     none: false
     requirements:
@@ -47,11 +45,11 @@ dependencies:
         - 4
         - 4
         version: 1.4.4
+  prerelease: false
+  type: :runtime
   requirement: *id002
 - !ruby/object:Gem::Dependency
-  prerelease: false
   name: shoulda
-  type: :development
   version_requirements: &id003 !ruby/object:Gem::Requirement
     none: false
     requirements:
@@ -61,11 +59,11 @@ dependencies:
         segments:
         - 0
         version: "0"
+  prerelease: false
+  type: :development
   requirement: *id003
 - !ruby/object:Gem::Dependency
-  prerelease: false
   name: bundler
-  type: :development
   version_requirements: &id004 !ruby/object:Gem::Requirement
     none: false
     requirements:
@@ -77,11 +75,11 @@ dependencies:
         - 0
         - 0
         version: 1.0.0
+  prerelease: false
+  type: :development
   requirement: *id004
 - !ruby/object:Gem::Dependency
-  prerelease: false
   name: jeweler
-  type: :development
   version_requirements: &id005 !ruby/object:Gem::Requirement
     none: false
     requirements:
@@ -93,11 +91,11 @@ dependencies:
         - 5
         - 1
         version: 1.5.1
+  prerelease: false
+  type: :development
   requirement: *id005
 - !ruby/object:Gem::Dependency
-  prerelease: false
   name: rcov
-  type: :development
   version_requirements: &id006 !ruby/object:Gem::Requirement
     none: false
     requirements:
@@ -107,11 +105,11 @@ dependencies:
         segments:
         - 0
         version: "0"
+  prerelease: false
+  type: :development
   requirement: *id006
 - !ruby/object:Gem::Dependency
-  prerelease: false
   name: racc
-  type: :development
   version_requirements: &id007 !ruby/object:Gem::Requirement
     none: false
     requirements:
@@ -121,6 +119,8 @@ dependencies:
         segments:
         - 0
         version: "0"
+  prerelease: false
+  type: :development
   requirement: *id007
 description: |
   This library helps write robots.txt compliant web robots in Ruby.
@@ -178,7 +178,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 requirements: []
 
 rubyforge_project:
-rubygems_version: 1.4.2
+rubygems_version: 1.6.2
 signing_key:
 specification_version: 3
 summary: A Ruby library to help write robots.txt compliant web robots