validate-website 1.9.3 → 1.10.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 9bc6ec31c18ead2191fb2c32b24cb7760b84cd9edc296e1ae5a16ef904ca4fd1
-  data.tar.gz: a5d08b83a6e6263bc3be738aaeab2836b14019575a385a801e5240cab23e1415
+  metadata.gz: 4a5b0553089c9d66d3622781fe6c4ab5ca68ac6a198fe36c53c84c8d34e14adb
+  data.tar.gz: f3f0cc4aef203f85ebb9868e793b7cd0312db8c4b6f7fb0d15beefeaaef1cc83
 SHA512:
-  metadata.gz: 1d1ed522d9721e37c17db84fc63257afe632622a684012aa2d6585cfeb1f740482c887977f94a208e3844c6c6d4731d2511ea2e1dbf52076585869e4a3d3b61d
-  data.tar.gz: e4c0d75cbfb629a41d7eccbb766be3aaa0dff5bf9dd1532b6786e02a11ad7fe3955b4c5516cc5ea52358114b64218268de5fa66230f16978ef11820d7021df40
+  metadata.gz: c9a8fae92bcb19d5f92466a7b77f3e40cca59643c946c15563885d9e65a1d2793271dee2b723a296fe487d57634fdd88f4440a60d2fac625a6a7208c8105159d
+  data.tar.gz: e7a78f899b42d5f27cb41e99472e37499389bf39c567dd9f16ab064fce38927a43de89c00411c25697c622ad76f59b8d4b9836d898249822cf9bc0d37591c3bf
data/History.md CHANGED
@@ -1,4 +1,15 @@
 
+1.10.0 / 2020-07-03
+==================
+
+* Fix build for Ruby 2.3 and 2.4
+* Remove rbx-3 from build
+* Remove minitest-focus and fix minitest 6 warnings
+* Fix html5_validator option and change html5_validator_service_url
+* Add Ruby 2.7 to CI and update jruby
+* Update rubocop and fix offences
+* Remove Ruby 2.2 support and update rubocop
+
 1.9.3 / 2019-04-11
 ==================
 
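The "Fix html5_validator option" entry is the user-visible change in this release: the crawler can now forward an `html5_validator` choice (and the `ignore` pattern) down to the validator, and the default web service URL moves to validator.nu. A minimal sketch based on the new tests further down in this diff; the site URL is illustrative:

    require 'validate_website/crawl'

    # Crawl and send HTML5 markup to the validator.nu web service,
    # ignoring any validation message that matches /Warning/.
    crawler = ValidateWebsite::Crawl.new(site: 'http://www.example.com/')
    crawler.crawl(html5_validator: :nu, ignore: /Warning/)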
data/LICENSE CHANGED
@@ -1,6 +1,6 @@
 The MIT License
 
-Copyright (c) 2009-2019 Laurent Arnoud <laurent@spkdev.net>
+Copyright (c) 2009-2020 Laurent Arnoud <laurent@spkdev.net>
 
 Permission is hereby granted, free of charge, to any person obtaining
 a copy of this software and associated documentation files (the
data/README.md CHANGED
@@ -145,7 +145,7 @@ See [GitHub](https://github.com/spk/validate-website/graphs/contributors).
 
 The MIT License
 
-Copyright (c) 2009-2019 Laurent Arnoud <laurent@spkdev.net>
+Copyright (c) 2009-2020 Laurent Arnoud <laurent@spkdev.net>
 
 ---
 [![Build](https://img.shields.io/travis/spk/validate-website.svg)](https://travis-ci.org/spk/validate-website)
data/Rakefile CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'rake/testtask'
 require 'rubocop/rake_task'
 require 'asciidoctor'
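From here on, nearly every Ruby file gains the `# frozen_string_literal: true` magic comment (part of the RuboCop update). With it, every string literal in the file is frozen, which is why the explicit `.freeze` calls on constants such as `START_MESSAGE`, `VERSION` and `TEST_DOMAIN` disappear later in this diff. A small illustration:

    # frozen_string_literal: true

    MESSAGE = 'Validating'   # frozen by the magic comment, so .freeze is redundant
    MESSAGE.frozen?          # => true
    # MESSAGE << '!'         # would raise FrozenError (RuntimeError before Ruby 2.5)

    buffer = +''             # unary + still yields a mutable copy when needed
    buffer << 'ok'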
@@ -1,4 +1,5 @@
 #!/usr/bin/env ruby
+# frozen_string_literal: true
 
 require 'validate_website/runner'
 exit_status = ValidateWebsite::Runner.run_crawl(ARGV)
@@ -1,4 +1,5 @@
 #!/usr/bin/env ruby
+# frozen_string_literal: true
 
 require 'validate_website/runner'
 exit_status = ValidateWebsite::Runner.run_static(ARGV)
@@ -1,2 +1,4 @@
+# frozen_string_literal: true
+
 require 'validate_website/core'
 require 'validate_website/version'
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'paint'
 
 module ValidateWebsite
@@ -5,6 +7,7 @@ module ValidateWebsite
   module ColorfulMessages
     def color(type, message, colored = true)
       return message unless colored
+
       send(type, message)
     end
 
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'set'
 require 'open-uri'
 require 'webrick/cookie'
@@ -26,7 +28,7 @@ module ValidateWebsite
     EXIT_FAILURE_MARKUP = 64
     EXIT_FAILURE_NOT_FOUND = 65
     EXIT_FAILURE_MARKUP_NOT_FOUND = 66
-    START_MESSAGE = 'Validating'.freeze
+    START_MESSAGE = 'Validating'
 
     # Initialize core ValidateWebsite class
     # @example
@@ -43,11 +45,11 @@ module ValidateWebsite
     end
 
     def errors?
-      @errors_count > 0
+      @errors_count.positive?
     end
 
     def not_founds?
-      @not_founds_count > 0
+      @not_founds_count.positive?
     end
 
     def exit_status
@@ -78,6 +80,7 @@ module ValidateWebsite
     def check_css_syntax(page)
       nodes = Crass::Parser.parse_stylesheet(page.body)
       return unless any_css_errors?(nodes)
+
       handle_validation_error(page.url)
     end
 
@@ -111,10 +114,10 @@ module ValidateWebsite
     # @param [Nokogiri::HTML::Document] original_doc
     # @param [String] The raw HTTP response body of the page
     # @param [String] url
-    # @param [Regexp] Errors to ignore
+    # @param [Hash] Validator options
     #
-    def validate(doc, body, url, ignore = nil)
-      validator = Validator.new(doc, body, ignore: ignore)
+    def validate(doc, body, url, options)
+      validator = Validator.new(doc, body, options)
       if validator.valid?
         print color(:success, '.', options[:color]) # rspec style
       else
@@ -125,6 +128,7 @@ module ValidateWebsite
     def handle_html_validation_error(validator, url)
       handle_validation_error(url)
       return unless options[:verbose]
+
       puts color(:error, validator.errors.join(', '), options[:color])
     end
 
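`validate` used to take a single `ignore` regexp as its fourth argument; it now receives a hash so that both `:ignore` and the new `:html5_validator` key reach `Validator.new`. Roughly, as a sketch of the call shape rather than the literal call sites:

    # 1.9.3: only the ignore pattern could be forwarded
    validate(page.doc, page.body, page.url, options[:ignore])

    # 1.10.0: a hash is forwarded, so the validator sees every relevant key
    validate(page.doc, page.body, page.url,
             ignore: options[:ignore], html5_validator: options[:html5_validator])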
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'validate_website/core'
 require 'validate_website/utils'
 
@@ -41,6 +43,7 @@ module ValidateWebsite
     #
     def extract_imgs_from_page(page)
       return Set[] if page.is_redirect?
+
       page.doc.search('//img[@src]').reduce(Set[]) do |result, elem|
         u = elem.attributes['src'].content
         result << page.to_absolute(URI.parse(URI.encode(u)))
@@ -77,7 +80,10 @@ module ValidateWebsite
        end
 
        if validate?(page)
-          validate(page.doc, page.body, page.url, options[:ignore])
+          keys = %i[ignore html5_validator]
+          # slice does not exists on Ruby <= 2.4
+          slice = Hash[[keys, options.values_at(*keys)].transpose]
+          validate(page.doc, page.body, page.url, slice)
        end
      end
    end
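`Hash[[keys, options.values_at(*keys)].transpose]` is a backport of `Hash#slice`, which only appeared in Ruby 2.5 while this release still supports 2.3 and 2.4. The two forms behave the same for the keys used here (`slice` drops missing keys, whereas `values_at` maps them to `nil`, which is harmless for the validator options):

    options = { ignore: /Warning/, html5_validator: :nu, color: true }
    keys = %i[ignore html5_validator]

    options.slice(*keys)                              # Ruby >= 2.5 only
    # => {:ignore=>/Warning/, :html5_validator=>:nu}

    # portable form: pair each key with its value, then rebuild a Hash
    Hash[[keys, options.values_at(*keys)].transpose]
    # => {:ignore=>/Warning/, :html5_validator=>:nu}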
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'slop'
 require File.expand_path('version', __dir__)
 
@@ -29,6 +31,7 @@
     # Generic parse method for crawl or static options
     def self.parse(options, type)
       raise ArgumentError unless VALID_TYPES.include?(type)
+
       # We are in command line (ARGV)
       if options.is_a?(Array)
         send("command_line_parse_#{type}", options)
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'validate_website/core'
 
 module ValidateWebsite
@@ -5,7 +7,7 @@ module ValidateWebsite
   class Runner
     def self.trap_interrupt
       trap('INT') do
-        STDERR.puts "\nExiting..."
+        warn "\nExiting..."
         exit!(1)
       end
     end
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'validate_website/core'
 require 'validate_website/utils'
 
@@ -5,7 +7,7 @@ module ValidateWebsite
   # Class for validation Static website
   class Static < Core
     CONTENT_TYPES = ['text/html', 'text/xhtml+xml'].freeze
-    START_MESSAGE_TYPE = 'files'.freeze
+    START_MESSAGE_TYPE = 'files'
 
     attr_reader :history_count
 
@@ -24,7 +26,8 @@ module ValidateWebsite
       files = Dir.glob(@options[:pattern])
      files.each do |file|
        next unless File.file?(file)
-        next if @options[:exclude] && @options[:exclude].match(file)
+        next if @options[:exclude]&.match(file)
+
        @history_count += 1
        check_static_file(file)
      end
@@ -57,7 +60,10 @@ module ValidateWebsite
 
     def check_page(file, page)
       if page.html? && options[:markup]
-        validate(page.doc, page.body, file, options[:ignore])
+        keys = %i[ignore html5_validator]
+        # slice does not exists on Ruby <= 2.4
+        slice = Hash[[keys, options.values_at(*keys)].transpose]
+        validate(page.doc, page.body, file, slice)
       end
       check_static_not_found(page.links) if options[:not_found]
     end
@@ -68,11 +74,13 @@ module ValidateWebsite
      static_links = links.map { |l| StaticLink.new(l, @site) }
      static_links.each do |static_link|
        next unless static_link.check?
+
        unless File.exist?(static_link.file_path)
          not_found_error(static_link.file_path)
          next
        end
        next unless static_link.css?
+
        check_static_not_found static_link.extract_urls_from_fake_css_response
      end
    end
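`@options[:exclude] && @options[:exclude].match(file)` becomes `@options[:exclude]&.match(file)`: the safe-navigation operator (Ruby 2.3+) only calls `match` when the receiver is not nil, which is exactly what the old `&&` guard did for a Regexp-or-nil option. For example:

    exclude = nil
    exclude&.match('index.html')         # => nil, no NoMethodError

    exclude = /data|example/
    exclude&.match('test/data/x.html')   # => MatchData, same as exclude.match(...)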
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'uri'
 require 'validate_website/utils'
 require 'validate_website/static'
@@ -25,9 +27,9 @@ StaticLink = Struct.new(:link, :site) do
 
   def body
     if File.exist?(link)
-      open(link).read
+      File.open(link).read
     else
-      open(file_path).read
+      File.open(file_path).read
     end
   end
 
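Replacing the bare `open` calls with `File.open` (here and throughout the tests) avoids `Kernel#open`, which treats a string starting with `|` as a command to run and is flagged by RuboCop's Security/Open cop; `File.open` only ever opens a path. A quick illustration of the difference, with the hostile input kept in comments:

    # Kernel#open runs a command when the name starts with a pipe:
    #   open('| echo pwned').read   # => "pwned\n" -- dangerous with user input
    # File.open never does; the same string is treated strictly as a path:
    #   File.open('| echo pwned')   # => raises Errno::ENOENT
    body = File.open('test/data/html5.html').read   # the pattern used across this diff

`File.read(path)` would be an even tighter idiom since it also closes the handle, but the diff keeps the existing `.read` call shape.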
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 # Base module ValidateWebsite
 module ValidateWebsite
   # Utils class for CSS helpers
@@ -10,6 +12,7 @@ module ValidateWebsite
     def self.extract_urls_from_css(page)
       return Set[] unless page
       return Set[] if page.body.nil?
+
       nodes = Crass::Parser.parse_stylesheet(page.body)
       extract_urls_from_nodes nodes, page
     end
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'uri'
 
 require 'nokogiri'
@@ -10,7 +12,7 @@ module ValidateWebsite
   class Validator
     extend ValidatorClassMethods
 
-    @html5_validator_service_url = 'https://checker.html5.org/'
+    @html5_validator_service_url = 'https://validator.nu/'
     XHTML_SCHEMA_PATH = File.expand_path('../../data/schemas', __dir__)
     @mutex = Mutex.new
 
@@ -68,14 +70,17 @@ module ValidateWebsite
     # http://www.w3.org/TR/xhtml1/#dtds
     def find_namespace(dtd)
       return unless dtd.system_id
+
       dtd_uri = URI.parse(dtd.system_id)
       return unless dtd_uri.path
+
       @dtd_uri = dtd_uri
       File.basename(@dtd_uri.path, '.dtd')
     end
 
     def document
       return @document if @document
+
       @document = if @dtd_uri && @body.match(@dtd_uri.to_s)
                     @body.sub(@dtd_uri.to_s, @namespace + '.dtd')
                   else
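The default HTML5 checker moves from `https://checker.html5.org/` to `https://validator.nu/`. The URL lives in a class-level attribute, so it can still be pointed at another instance; the accessor names below come from the tests in this diff, and the self-hosted URL is purely illustrative:

    ValidateWebsite::Validator.html5_validator_service_url
    # => "https://validator.nu/"   (new default in 1.10.0)

    # point the checker at a self-hosted validator instead
    ValidateWebsite::Validator.html5_validator_service_url = 'https://validator.example.org/'

    # or per crawl, as the crawl test does
    ValidateWebsite::Crawl.new(site: 'http://www.example.com/',
                               html5_validator_service_url: 'https://validator.example.org/')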
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'tidy_ffi'
 
 # Validator Class Methods
@@ -9,6 +11,7 @@ module ValidatorClassMethods
 
   def tidy
     return @tidy if defined?(@tidy)
+
     @lib_tidy = TidyFFI::LibTidy
     @tidy = TidyFFI::Tidy
   rescue TidyFFI::LibTidyNotInstalled
@@ -2,5 +2,5 @@
 
 # Version file for ValidateWebsite
 module ValidateWebsite
-  VERSION = '1.9.3'.freeze
+  VERSION = '1.10.0'
 end
@@ -1,9 +1,11 @@
+# frozen_string_literal: true
+
 require File.expand_path('test_helper', __dir__)
 
 describe ValidateWebsite::Core do
   describe 'invalid options' do
     it 'raise ArgumentError on wrong validation_type' do
-      proc { ValidateWebsite::Core.new({ color: false }, :fail) }
+      _(proc { ValidateWebsite::Core.new({ color: false }, :fail) })
        .must_raise ArgumentError
     end
   end
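Every expectation target in the test suite is now wrapped in `_()`. Minitest 5.12+ deprecates calling `must_*`/`wont_*` directly on arbitrary objects (the monkey-patched "global" expectations) ahead of their removal in Minitest 6, which is what the changelog's "fix minitest 6 warnings" entry refers to. The rewrite is mechanical:

    # deprecated style: warns on recent Minitest, breaks on Minitest 6
    @validate_website.history_count.must_equal 5

    # wrapped style: _() (also available as value()/expect()) returns an
    # expectation proxy that carries the must_*/wont_* methods
    _(@validate_website.history_count).must_equal 5
    _(proc { raise ArgumentError }).must_raise ArgumentError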
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require File.expand_path('test_helper', __dir__)
 
 # rubocop:disable Metrics/BlockLength
@@ -21,7 +23,7 @@ describe ValidateWebsite::Crawl do
       _out, _err = capture_io do
         v = ValidateWebsite::Crawl.new(site: TEST_DOMAIN, user_agent: ua)
         v.crawl
-        v.crawler.user_agent.must_equal ua
+        _(v.crawler.user_agent).must_equal ua
       end
     end
 
@@ -31,7 +33,7 @@ describe ValidateWebsite::Crawl do
       _out, _err = capture_io do
         ValidateWebsite::Crawl.new(site: TEST_DOMAIN,
                                    html5_validator_service_url: new)
-        validator.html5_validator_service_url.must_equal new
+        _(validator.html5_validator_service_url).must_equal new
         validator.html5_validator_service_url = original
       end
     end
@@ -43,7 +45,8 @@ describe ValidateWebsite::Crawl do
       _out, _err = capture_io do
         v = ValidateWebsite::Crawl.new(site: TEST_DOMAIN, cookies: cookies)
         v.crawl
-        v.crawler.cookies.cookies_for_host(v.host).must_equal v.default_cookies
+        _(v.crawler.cookies.cookies_for_host(v.host))
+          .must_equal v.default_cookies
       end
     end
   end
@@ -53,37 +56,53 @@ describe ValidateWebsite::Crawl do
      name = 'xhtml1-strict'
      file = File.join('test', 'data', "#{name}.html")
      page = FakePage.new(name,
-                         body: open(file).read,
+                         body: File.open(file).read,
                          content_type: 'text/html')
      @validate_website.site = page.url
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.history_count.must_equal 5
+      _(@validate_website.history_count).must_equal 5
    end
 
    it 'extract link' do
      name = 'html4-strict'
      file = File.join('test', 'data', "#{name}.html")
      page = FakePage.new(name,
-                         body: open(file).read,
+                         body: File.open(file).read,
                          content_type: 'text/html')
      @validate_website.site = page.url
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.history_count.must_equal 98
+      _(@validate_website.history_count).must_equal 98
+    end
+
+    it 'can change validator' do
+      name = 'html5-fail'
+      file = File.join('test', 'data', "#{name}.html")
+      page = FakePage.new(name,
+                          body: File.open(file).read,
+                          content_type: 'text/html')
+      validator_res = File.join('test', 'data', 'validator.nu-failure.json')
+      stub_request(:any, /#{validator.html5_validator_service_url}/)
+        .to_return(body: File.open(validator_res).read)
+      @validate_website.site = page.url
+      _out, _err = capture_io do
+        @validate_website.crawl(html5_validator: :nu, ignore: /Warning/)
+      end
+      _(@validate_website.errors_count).must_equal 1
    end
 
    it 'crawl when URLs are not ascii only' do
      name = 'cozy-community'
      file = File.join('test', 'data', "#{name}.html")
      page = FakePage.new(name,
-                         body: open(file).read,
+                         body: File.open(file).read,
                          content_type: 'text/html')
      validator_res = File.join('test', 'data', 'validator.nu-failure.json')
      stub_request(:any, /#{validator.html5_validator_service_url}/)
-        .to_return(body: open(validator_res).read)
+        .to_return(body: File.open(validator_res).read)
      @validate_website.site = page.url
      _out, _err = capture_io do
        @validate_website.crawl
@@ -119,7 +138,7 @@ describe ValidateWebsite::Crawl do
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.history_count.must_equal 5
+      _(@validate_website.history_count).must_equal 5
    end
 
    it 'should extract url with single quote' do
@@ -130,7 +149,7 @@ describe ValidateWebsite::Crawl do
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.history_count.must_equal 2
+      _(@validate_website.history_count).must_equal 2
    end
 
    it 'should extract url with double quote' do
@@ -141,7 +160,7 @@ describe ValidateWebsite::Crawl do
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.history_count.must_equal 2
+      _(@validate_website.history_count).must_equal 2
    end
 
    it 'should extract url with params' do
@@ -152,7 +171,7 @@ describe ValidateWebsite::Crawl do
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.history_count.must_equal 2
+      _(@validate_website.history_count).must_equal 2
    end
 
    it 'should not extract invalid urls' do
@@ -163,7 +182,7 @@ describe ValidateWebsite::Crawl do
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.history_count.must_equal 1
+      _(@validate_website.history_count).must_equal 1
    end
  end
 
@@ -182,7 +201,7 @@ describe ValidateWebsite::Crawl do
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.errors_count.must_equal 1
+      _(@validate_website.errors_count).must_equal 1
    end
 
    it 'should be invalid with syntax error' do
@@ -193,7 +212,7 @@ describe ValidateWebsite::Crawl do
      _out, _err = capture_io do
        @validate_website.crawl
      end
-      @validate_website.errors_count.must_equal 1
+      _(@validate_website.errors_count).must_equal 1
    end
  end
 end
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require File.expand_path('test_helper', __dir__)
 
 # rubocop:disable Metrics/BlockLength
@@ -17,7 +19,7 @@ describe ValidateWebsite::Static do
                                 not_found: false,
                                 exclude: /data|example/)
       end
-      @validate_website.history_count.must_equal 0
+      _(@validate_website.history_count).must_equal 0
     end
 
     it 'no space in directory name' do
@@ -28,7 +30,7 @@ describe ValidateWebsite::Static do
                                 markup: false,
                                 not_found: false)
       end
-      @validate_website.not_founds_count.must_equal 0
+      _(@validate_website.not_founds_count).must_equal 0
     end
 
     it 'not found' do
@@ -40,7 +42,25 @@ describe ValidateWebsite::Static do
                                  markup: false,
                                  not_found: true)
        end
-        @validate_website.not_founds_count.must_equal 213
+        _(@validate_website.not_founds_count).must_equal 213
+      end
+    end
+
+    it 'can change validator' do
+      validator_res = File.join('test', 'data', 'validator.nu-failure.json')
+      stub_request(:any,
+                   /#{ValidateWebsite::Validator.html5_validator_service_url}/)
+        .to_return(body: File.open(validator_res).read)
+      pattern = File.join(File.dirname(__FILE__), 'data',
+                          'html5-fail.html')
+      Dir.chdir('test/data') do
+        _out, _err = capture_io do
+          @validate_website.crawl(pattern: pattern,
+                                  site: 'http://w3.org/',
+                                  ignore: /Warning/,
+                                  html5_validator: :nu)
+        end
+        _(@validate_website.errors_count).must_equal 1
      end
    end
 
@@ -53,7 +73,7 @@ describe ValidateWebsite::Static do
                                 site: 'http://w3.org/',
                                 ignore: /height|width|Length/)
       end
-      @validate_website.errors_count.must_equal 0
+      _(@validate_website.errors_count).must_equal 0
     end
   end
 
@@ -67,7 +87,7 @@ describe ValidateWebsite::Static do
                                 markup: false,
                                 css_syntax: true)
       end
-      @validate_website.errors_count.must_equal 1
+      _(@validate_website.errors_count).must_equal 1
     end
   end
 end
@@ -4,16 +4,15 @@
   require 'coveralls'
   Coveralls.wear!
 rescue LoadError
-  STDERR.puts 'coveralls not loaded'
+  warn 'coveralls not loaded'
 end
 
 require 'minitest/autorun'
-require 'minitest/focus'
 require 'spidr'
 
 require 'validate_website/core'
 
 require File.expand_path('webmock_helper', __dir__)
 
-TEST_DOMAIN = 'http://www.example.com/'.freeze
+TEST_DOMAIN = 'http://www.example.com/'
 ENV['LC_ALL'] = 'C.UTF-8' if defined?(RUBY_ENGINE) && RUBY_ENGINE == 'jruby'
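`STDERR.puts` becomes `warn` here and in the runner above, which satisfies RuboCop's Style/StderrPuts cop: `Kernel#warn` writes through `$stderr` (so reassigning the error stream is respected), and since Ruby 2.5 it routes through `Warning.warn`, so it can be intercepted or silenced with `-W0`. For example:

    STDERR.puts 'coveralls not loaded'   # always hits the STDERR constant
    warn 'coveralls not loaded'          # goes through $stderr / Warning.warn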
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require File.expand_path('test_helper', __dir__)
 
 # rubocop:disable Metrics/BlockLength
@@ -14,15 +16,15 @@ describe ValidateWebsite::Validator do
      name = 'w3.org-xhtml1-strict-errors'
      file = File.join('test', 'data', "#{name}.html")
      page = FakePage.new(name,
-                         body: open(file).read,
+                         body: File.open(file).read,
                          content_type: 'text/html')
      @xhtml1_page = @http.get_page(page.url)
      ignore = /width|height|Length/
      validator = subject.new(@xhtml1_page.doc,
                              @xhtml1_page.body,
                              ignore: ignore)
-      validator.valid?.must_equal true
-      validator.errors.must_equal []
+      _(validator.valid?).must_equal true
+      _(validator.errors).must_equal []
    end
 
    it 'xhtml1-strict should be valid' do
@@ -30,17 +32,17 @@ describe ValidateWebsite::Validator do
      dtd_uri = 'http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd'
      file = File.join('test', 'data', "#{name}.html")
      page = FakePage.new(name,
-                         body: open(file).read,
+                         body: File.open(file).read,
                          content_type: 'text/html')
      @xhtml1_page = @http.get_page(page.url)
      ignore = /width|height|Length/
      validator = subject.new(@xhtml1_page.doc,
                              @xhtml1_page.body,
                              ignore: ignore)
-      validator.dtd.system_id.must_equal dtd_uri
-      validator.namespace.must_equal name
-      validator.valid?.must_equal true
-      validator.errors.must_equal []
+      _(validator.dtd.system_id).must_equal dtd_uri
+      _(validator.namespace).must_equal name
+      _(validator.valid?).must_equal true
+      _(validator.errors).must_equal []
    end
  end
 
@@ -49,18 +51,18 @@ describe ValidateWebsite::Validator do
    before do
      validator_res = File.join('test', 'data', 'validator.nu-success.json')
      stub_request(:any, /#{subject.html5_validator_service_url}/)
-        .to_return(body: open(validator_res).read)
+        .to_return(body: File.open(validator_res).read)
    end
    it 'html5 should be valid' do
      name = 'html5'
      file = File.join('test', 'data', "#{name}.html")
      page = FakePage.new(name,
-                         body: open(file).read,
+                         body: File.open(file).read,
                          content_type: 'text/html')
      @html5_page = @http.get_page(page.url)
      validator = subject.new(@html5_page.doc,
                              @html5_page.body)
-      validator.valid?.must_equal true
+      _(validator.valid?).must_equal true
    end
  end
 
@@ -68,11 +70,11 @@ describe ValidateWebsite::Validator do
    before do
      validator_res = File.join('test', 'data', 'validator.nu-failure.json')
      stub_request(:any, /#{subject.html5_validator_service_url}/)
-        .to_return(body: open(validator_res).read)
+        .to_return(body: File.open(validator_res).read)
      name = 'html5-fail'
      file = File.join('test', 'data', "#{name}.html")
      page = FakePage.new(name,
-                         body: open(file).read,
+                         body: File.open(file).read,
                          content_type: 'text/html')
      @html5_page = @http.get_page(page.url)
    end
@@ -82,8 +84,8 @@ describe ValidateWebsite::Validator do
      validator = subject.new(@html5_page.doc,
                              @html5_page.body,
                              html5_validator: :nu)
-      validator.valid?.must_equal false
-      validator.errors.size.must_equal 3
+      _(validator.valid?).must_equal false
+      _(validator.errors.size).must_equal 3
    end
 
    it 'should exclude errors ignored by :ignore option' do
@@ -92,8 +94,8 @@ describe ValidateWebsite::Validator do
                              @html5_page.body,
                              ignore: ignore,
                              html5_validator: :nu)
-      validator.valid?.must_equal false
-      validator.errors.size.must_equal 1
+      _(validator.valid?).must_equal false
+      _(validator.errors.size).must_equal 1
    end
  end
 
@@ -101,8 +103,8 @@ describe ValidateWebsite::Validator do
    it 'should have an array of errors' do
      validator = subject.new(@html5_page.doc,
                              @html5_page.body)
-      validator.valid?.must_equal false
-      validator.errors.size.must_equal 3
+      _(validator.valid?).must_equal false
+      _(validator.errors.size).must_equal 3
    end
 
    it 'should exclude errors ignored by :ignore option' do
@@ -110,8 +112,8 @@ describe ValidateWebsite::Validator do
      validator = subject.new(@html5_page.doc,
                              @html5_page.body,
                              ignore: ignore)
-      validator.valid?.must_equal false
-      validator.errors.size.must_equal 2
+      _(validator.valid?).must_equal false
+      _(validator.errors.size).must_equal 2
    end
  end
 end
@@ -122,13 +124,13 @@ describe ValidateWebsite::Validator do
      name = 'html4-strict'
      file = File.join('test', 'data', "#{name}.html")
      page = FakePage.new(name,
-                         body: open(file).read,
+                         body: File.open(file).read,
                          content_type: 'text/html')
      @html4_strict_page = @http.get_page(page.url)
      validator = subject.new(@html4_strict_page.doc,
                              @html4_strict_page.body)
      validator.valid?
-      validator.errors.must_equal []
+      _(validator.errors).must_equal []
    end
  end
 end
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'webmock/minitest'
 
 # FakePage html helper for webmock
@@ -27,8 +29,8 @@ class FakePage
 
   def create_body
     @body = '<html><body>'
-    @links.each { |l| @body += "<a href=\"#{TEST_DOMAIN}#{l}\"></a>" } if @links
-    @hrefs.each { |h| @body += "<a href=\"#{h}\"></a>" } if @hrefs
+    @links&.each { |l| @body += "<a href=\"#{TEST_DOMAIN}#{l}\"></a>" }
+    @hrefs&.each { |h| @body += "<a href=\"#{h}\"></a>" }
     @body += '</body></html>'
   end
 
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: validate-website
 version: !ruby/object:Gem::Version
-  version: 1.9.3
+  version: 1.10.0
 platform: ruby
 authors:
 - Laurent Arnoud
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-04-11 00:00:00.000000000 Z
+date: 2020-07-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: crass
@@ -114,28 +114,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '5'
+        version: '5.0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: '5'
-- !ruby/object:Gem::Dependency
-  name: minitest-focus
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1'
+        version: '5.0'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -156,14 +142,14 @@ dependencies:
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: 0.58.0
+        version: 0.72.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
    - - "~>"
      - !ruby/object:Gem::Version
-        version: 0.58.0
+        version: 0.72.0
 - !ruby/object:Gem::Dependency
   name: webmock
   requirement: !ruby/object:Gem::Requirement
@@ -273,15 +259,14 @@ required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 2.2.0
+      version: 2.3.0
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
   - - ">="
    - !ruby/object:Gem::Version
      version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.7.6.2
+rubygems_version: 3.1.2
 signing_key:
 specification_version: 4
 summary: Web crawler for checking the validity of your documents
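In the gemspec metadata this all adds up to: minitest-focus dropped, minitest pinned to ~> 5.0, rubocop bumped to ~> 0.72.0, and the minimum Ruby raised from 2.2.0 to 2.3.0. For consumers the only hard requirement is the new Ruby floor; a hedged Gemfile sketch:

    # Gemfile -- validate-website 1.10.0 requires Ruby >= 2.3.0
    gem 'validate-website', '~> 1.10'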