middleman-robots 1.2.3 → 1.3.0

This diff compares the content of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
lib/middleman-robots/extension.rb CHANGED
@@ -1,69 +1,31 @@
+require 'middleman-robots/generator'
+
 module Middleman
   module Robots
     # Robots Extension Class
-    #
-    # Create robots.txt when `$ middleman build`
     class Extension < ::Middleman::Extension
       option :rules, [], 'List of rules about sitemap.xml'
       option :sitemap, false, 'URI of sitemap.xml'
 
       def initialize(app, options_hash = {}, &block)
         super
-        build_dir = app.config.build_dir
-
-        data = rules(options.rules) + sitemap(options.sitemap)
-        data.gsub!(/\n+$/, "\n")
-
-        app.after_build do
-          File.open(File.join(build_dir, 'robots.txt'), 'w') do |file|
-            file.puts(data)
-          end
-          logger.info '== middleman-robots: robots.txt created =='
-        end
-      end
-
-      def rules(rules)
-        return '' if rules.empty?
-        data = []
-        rules.each do |rule|
-          row = []
-          row << user_agent(rule)
-          row << disallow(rule)
-          row << allow(rule)
-          row.compact!
-          data << row.join("\n") + "\n\n" if row.length > 0
-        end
-        data.join('')
-      end
-
-      def user_agent(rule)
-        return unless rule.key?('user-agent') || rule.key?(:user_agent)
-        user_agent = rule[:user_agent] || rule['user-agent']
-        "User-Agent: #{user_agent}"
-      end
-
-      def disallow(rule)
-        return unless rule.key?(:disallow)
-        lines = []
-        rule[:disallow].each do |path|
-          path = File.join('/', path) unless /^\// =~ path
-          lines << "Disallow: #{path}"
-        end
-        lines
       end
 
-      def allow(rule)
-        return unless rule.key?(:allow)
-        lines = []
-        rule[:allow].each do |path|
-          path = File.join('/' + path) unless /^\// =~ path
-          lines << "Allow: #{path}"
-        end
-        lines
+      def manipulate_resource_list(resources)
+        data = Generator.new(options.rules, options.sitemap).process
+        robots = Middleman::Sitemap::Resource.new(
+          app.sitemap,
+          'robots.txt',
+          File.join(template_dir, 'robots.txt.erb')
+        )
+        robots.add_metadata(locals: {robots: data})
+
+        logger.info '== middleman-robots: robots.txt added to resources =='
+        resources << robots
       end
 
-      def sitemap(path)
-        path ? "Sitemap: #{path}" : ''
+      def template_dir
+        File.expand_path(File.join('..', 'templates'), __FILE__)
       end
     end
   end
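
Note: the extension no longer writes robots.txt from an after_build hook; it now adds the file to Middleman's sitemap via manipulate_resource_list, so robots.txt is also served in preview mode, which lines up with the new features/server.feature listed in the metadata below. Activation from config.rb still uses the :rules and :sitemap options; a minimal sketch, assuming the extension is registered under the :robots name (option values are illustrative, not taken from this diff):

    # config.rb; illustrative values
    activate :robots,
             rules: [
               { user_agent: '*', allow: %w(/), disallow: %w(/tmp/*) }
             ],
             sitemap: 'http://example.com/sitemap.xml'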
lib/middleman-robots/generator.rb ADDED
@@ -0,0 +1,42 @@
+require 'middleman-robots/group'
+
+module Middleman
+  module Robots
+    # Robots Text Generator Class
+    class Generator
+      def initialize(rules, sitemap_uri)
+        @rules = rules
+        @sitemap_uri = sitemap_uri
+      end
+
+      def process
+        blocks = block_text
+        sitemap = sitemap_text
+
+        if !blocks.empty? && !sitemap.empty?
+          blocks + "\n" + sitemap
+        elsif !blocks.empty?
+          blocks
+        elsif !sitemap.empty?
+          sitemap
+        else
+          ''
+        end
+      end
+
+      private
+      def block_text
+        return '' if @rules.empty?
+        data = []
+        @rules.each do |rule|
+          data << Group.new(rule).text
+        end
+        data.join("\n")
+      end
+
+      def sitemap_text
+        @sitemap_uri ? "Sitemap: #{@sitemap_uri}" : ''
+      end
+    end
+  end
+end
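
Note: Generator joins the per-rule blocks and the Sitemap line, separating non-empty parts with a blank line. A minimal sketch of direct usage (inputs are illustrative, not taken from this diff):

    require 'middleman-robots/generator'

    generator = Middleman::Robots::Generator.new(
      [{ user_agent: '*', disallow: %w(tmp/*) }],
      'http://example.com/sitemap.xml'
    )
    generator.process
    # => "User-Agent: *\nDisallow: /tmp/*\n\nSitemap: http://example.com/sitemap.xml"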
lib/middleman-robots/group.rb ADDED
@@ -0,0 +1,50 @@
+module Middleman
+  module Robots
+    # Robots Group Class
+    #
+    # Group class generate block in robots.txt
+    class Group
+      attr_reader :user_agent, :disallow, :allow
+
+      def initialize(rule)
+        @user_agent = generate_user_agent(rule)
+        @disallow = generate_disallow(rule)
+        @allow = generate_allow(rule)
+      end
+
+      def text
+        group = []
+        group << "User-Agent: #{@user_agent}" unless @user_agent.empty?
+        group << @disallow.collect{|item| "Disallow: #{item}" }.join("\n") if @disallow.length > 0
+        group << @allow.collect{|item| "Allow: #{item}" }.join("\n") if @allow.length > 0
+        group.join("\n") + "\n"
+      end
+
+      private
+      def generate_user_agent(rule)
+        return '*' unless rule.key?('user-agent') || rule.key?(:user_agent)
+        rule[:user_agent] || rule['user-agent']
+      end
+
+      def generate_disallow(rule)
+        paths = []
+        return paths unless rule.has_key?(:disallow)
+
+        rule[:disallow].each do |path|
+          paths << File.join('/', path)
+        end
+        paths
+      end
+
+      def generate_allow(rule)
+        paths = []
+        return paths unless rule.has_key?(:allow)
+
+        rule[:allow].each do |path|
+          paths << File.join('/', path)
+        end
+        paths
+      end
+    end
+  end
+end
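
Note: each Group normalizes one rule hash: a missing user-agent key falls back to '*', and every disallow/allow path is rooted with File.join('/', path). An illustrative sketch:

    require 'middleman-robots/group'

    group = Middleman::Robots::Group.new(disallow: %w(tmp/*))
    group.user_agent # => "*"
    group.disallow   # => ["/tmp/*"]
    group.text       # => "User-Agent: *\nDisallow: /tmp/*\n"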
lib/middleman-robots/templates/robots.txt.erb ADDED
@@ -0,0 +1 @@
+<%= robots %>
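
Note: the template simply prints the text prepared by Generator; robots is the local set through robots.add_metadata(locals: {robots: data}) in the extension. A standalone ERB approximation (Middleman's actual rendering pipeline supplies the local differently):

    require 'erb'

    data = "User-Agent: *\nDisallow: /tmp/*\n"
    ERB.new('<%= robots %>').result_with_hash(robots: data)
    # => "User-Agent: *\nDisallow: /tmp/*\n"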
lib/middleman-robots/version.rb CHANGED
@@ -1,6 +1,6 @@
 module Middleman
   # Middleman robots module
   module Robots
-    VERSION = '1.2.3'
+    VERSION = '1.3.0'
   end
 end
middleman-robots.gemspec CHANGED
@@ -8,8 +8,8 @@ Gem::Specification.new do |spec|
   spec.version = Middleman::Robots::VERSION
   spec.authors = ["Yuya Matsushima"]
   spec.email = ["terra@e2esound.com"]
-  spec.summary = %q{Create robots.txt when do 'build'.}
-  spec.description = %q{Create robots.txt when do 'build'.}
+  spec.summary = %q{Generate robots.txt by config.rb.}
+  spec.description = %q{Generate robots.txt by config.rb.}
   spec.homepage = "https://github.com/yterajima/middleman-robots"
   spec.license = "MIT"

tests/test_generator.rb ADDED
@@ -0,0 +1,37 @@
+require 'minitest/autorun'
+require_relative '../lib/middleman-robots/generator.rb'
+
+class Test_Generator < MiniTest::Test
+  def test_process
+    rules = [
+      {
+        user_agent: 'Googlebot',
+        disallow: %w(tmp/* /something/dir/file_disallow.html),
+        allow: %w(allow/* /something/dir/file_allow.html)
+      },
+      {
+        user_agent: 'Googlebot-Image',
+        disallow: %w(tmp/* /something/dir/file_disallow.html),
+        allow: %w(allow/* /something/dir/file_allow.html)
+      }
+    ]
+    sitemap_uri = 'http://example.com/sitemap.xml'
+    generator = Middleman::Robots::Generator.new(rules, sitemap_uri)
+
+    expected = "User-Agent: Googlebot
+Disallow: /tmp/*
+Disallow: /something/dir/file_disallow.html
+Allow: /allow/*
+Allow: /something/dir/file_allow.html
+
+User-Agent: Googlebot-Image
+Disallow: /tmp/*
+Disallow: /something/dir/file_disallow.html
+Allow: /allow/*
+Allow: /something/dir/file_allow.html
+
+Sitemap: http://example.com/sitemap.xml"
+
+    assert_equal expected, generator.process
+  end
+end
tests/test_group.rb ADDED
@@ -0,0 +1,121 @@
+require 'minitest/autorun'
+require_relative '../lib/middleman-robots/group.rb'
+
+class Test_Group < MiniTest::Test
+
+  def test_initialize
+    rule = {
+      user_agent: 'GoogleBot',
+      disallow: %w(tmp/* /someting/dir/disallow.html),
+      allow: %w(allow/* /someting/dir/allow.html),
+    }
+    group = Middleman::Robots::Group.new(rule)
+
+    assert_equal rule[:user_agent], group.user_agent
+    assert_equal %w(/tmp/* /someting/dir/disallow.html), group.disallow
+    assert_equal %w(/allow/* /someting/dir/allow.html), group.allow
+  end
+
+  def test_initialize_without_user_agent
+    rule = {
+      disallow: %w(/tmp/*),
+      allow: %w(/allow/*),
+    }
+    group = Middleman::Robots::Group.new(rule)
+
+    assert_equal '*', group.user_agent
+    assert_equal rule[:disallow], group.disallow
+    assert_equal rule[:allow], group.allow
+  end
+
+  def test_initialize_without_disallow
+    rule = {
+      user_agent: 'GoogleBot',
+      allow: %w(/allow/* /someting/dir/allow.html),
+    }
+    group = Middleman::Robots::Group.new(rule)
+
+    assert_equal rule[:user_agent], group.user_agent
+    assert_equal [], group.disallow
+    assert_equal rule[:allow], group.allow
+  end
+
+  def test_initialize_without_allow
+    rule = {
+      user_agent: 'GoogleBot',
+      disallow: %w(/tmp/* /someting/dir/disallow.html),
+    }
+    group = Middleman::Robots::Group.new(rule)
+
+    assert_equal rule[:user_agent], group.user_agent
+    assert_equal rule[:disallow], group.disallow
+    assert_equal [], group.allow
+  end
+
+  def test_text
+    rule = {
+      user_agent: 'GoogleBot',
+      disallow: %w(tmp/* /someting/dir/disallow.html),
+      allow: %w(allow/* /someting/dir/allow.html),
+    }
+    group = Middleman::Robots::Group.new(rule)
+
+    expected = <<-END
+User-Agent: GoogleBot
+Disallow: /tmp/*
+Disallow: /someting/dir/disallow.html
+Allow: /allow/*
+Allow: /someting/dir/allow.html
+    END
+
+    assert_equal expected, group.text
+  end
+
+  def test_text_without_user_agent
+    rule = {
+      disallow: %w(/tmp/*),
+      allow: %w(/allow/*),
+    }
+    group = Middleman::Robots::Group.new(rule)
+
+    expected = <<-END
+User-Agent: *
+Disallow: /tmp/*
+Allow: /allow/*
+    END
+
+    assert_equal expected, group.text
+  end
+
+  def test_text_without_disallow
+    rule = {
+      user_agent: 'GoogleBot',
+      allow: %w(/allow/* /someting/dir/allow.html),
+    }
+    group = Middleman::Robots::Group.new(rule)
+
+    expected = <<-END
+User-Agent: GoogleBot
+Allow: /allow/*
+Allow: /someting/dir/allow.html
+    END
+
+    assert_equal expected, group.text
+  end
+
+  def test_text_without_allow
+    rule = {
+      user_agent: 'GoogleBot',
+      disallow: %w(/tmp/* /someting/dir/disallow.html),
+    }
+    group = Middleman::Robots::Group.new(rule)
+
+    expected = <<-END
+User-Agent: GoogleBot
+Disallow: /tmp/*
+Disallow: /someting/dir/disallow.html
+    END
+
+    assert_equal expected, group.text
+  end
+end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: middleman-robots
 version: !ruby/object:Gem::Version
-  version: 1.2.3
+  version: 1.3.0
 platform: ruby
 authors:
 - Yuya Matsushima
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2016-07-03 00:00:00.000000000 Z
+date: 2017-04-29 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: middleman
@@ -94,7 +94,7 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
        version: '10'
-description: Create robots.txt when do 'build'.
+description: Generate robots.txt by config.rb.
 email:
 - terra@e2esound.com
 executables: []
@@ -108,17 +108,28 @@ files:
 - README.jp.md
 - README.md
 - Rakefile
-- features/robots.feature
+- features/build.feature
+- features/server.feature
 - features/support/env.rb
 - fixtures/basic-app/source/index.html.erb
 - fixtures/basic-app/source/layouts/layout.erb
 - fixtures/basic-app/source/stylesheets/all.css
 - fixtures/basic-app/source/stylesheets/normalize.css
+- fixtures/server-app/config.rb
+- fixtures/server-app/source/index.html.erb
+- fixtures/server-app/source/layouts/layout.erb
+- fixtures/server-app/source/stylesheets/all.css
+- fixtures/server-app/source/stylesheets/normalize.css
 - lib/middleman-robots.rb
 - lib/middleman-robots/extension.rb
+- lib/middleman-robots/generator.rb
+- lib/middleman-robots/group.rb
+- lib/middleman-robots/templates/robots.txt.erb
 - lib/middleman-robots/version.rb
 - lib/middleman_extension.rb
 - middleman-robots.gemspec
+- tests/test_generator.rb
+- tests/test_group.rb
 homepage: https://github.com/yterajima/middleman-robots
 licenses:
 - MIT
@@ -139,10 +150,11 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.5.1
+rubygems_version: 2.6.11
 signing_key:
 specification_version: 4
-summary: Create robots.txt when do 'build'.
+summary: Generate robots.txt by config.rb.
 test_files:
-- features/robots.feature
+- features/build.feature
+- features/server.feature
 - features/support/env.rb