ReadRobotstxt 0.2.0 → 0.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 38847ae783023e434f2b98afa2e27fdbbf4e1925a395e425cb4fba95e94bd31c
- data.tar.gz: 1191a2352f6ed2792ce8f34f5f5611b2a0d7a41fb6e1bf1de0857b0757f54a51
+ metadata.gz: 77e4a680402a0213053b6cc3b7fa825a332a9e6d8c61a42842fa9461dd814c79
+ data.tar.gz: 66811f73150550f72f935e19823c29a639b5a1c4d79302ae90e1b0b7884e892a
  SHA512:
- metadata.gz: c6221bd72dc37f2f49ee48d466d11e10c8f58116d75077aa6bd942629ac5cd8bd6129900fd2d1187c2775fb0157904f19e2e687aa3d4131ae3aea2d4fe3d5dec
- data.tar.gz: 8d382e1e16d44a32cc97ca4e817b08ad43c242bb210508318289646d489f9fd6ebe4343b4c479fcbccfbfb24e545106d6ab6d2d93f06a02c9293307ede9bd577
+ metadata.gz: 3a35d73d66bc13cc565c920601508ad591e6c7b68559713419a4e395e722e7803bcd742c1539853f2ec4ed6b441a568fb01fb2795d2f4d4fd07abcb6d73c6067
+ data.tar.gz: bffeb4a05a3fe498e14eee5648d7933799c88ebd167ee594d17c7016d352df369c688293d110f918d96ff77bf98e727b23d34ce616fe04574df7d18f1ae534d1
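Both digest pairs change because the gem was repacked; the checksum block carries no other information. If you want to confirm that a locally fetched copy matches these values, the digests can be recomputed from the two files inside the .gem archive (a .gem is a plain tar containing metadata.gz, data.tar.gz, and checksums.yaml.gz). A minimal sketch, assuming ReadRobotstxt-0.3.0.gem has already been fetched (e.g. with `gem fetch ReadRobotstxt -v 0.3.0`) and unpacked with `tar -xf ReadRobotstxt-0.3.0.gem`:

```ruby
# Recompute the SHA256/SHA512 digests recorded in checksums.yaml.
# Assumes metadata.gz and data.tar.gz sit in the current directory,
# extracted from ReadRobotstxt-0.3.0.gem (which is a plain tar archive).
require 'digest'

%w[metadata.gz data.tar.gz].each do |name|
  puts "#{name} SHA256: #{Digest::SHA256.file(name).hexdigest}"
  puts "#{name} SHA512: #{Digest::SHA512.file(name).hexdigest}"
end
```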
data/lib/Robots/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Robots
- VERSION = '0.2.0'
+ VERSION = '0.3.0'
  end
data/lib/Robots.rb CHANGED
@@ -9,7 +9,6 @@ module Robots

  def initialize(url)
  @url = url
- puts "kkk"
  uri = URI(File.join(@url, 'robots.txt'))
  @response = Net::HTTP.get_response(uri).body.split("\n").map(&:to_s)
  end
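The only source change in lib/Robots.rb is the removal of the stray `puts "kkk"` debug output; the constructor otherwise still joins the base URL with robots.txt, fetches it over Net::HTTP, and keeps the body as an array of lines. A standalone sketch of that same fetch, outside the gem (the base URL is just the example site used in the removed README):

```ruby
# Standalone version of the fetch that Robots::Url#initialize performs:
# build <base>/robots.txt, GET it, and split the body into lines.
require 'net/http'
require 'uri'

url = 'https://www.ebay.com'                # example site from the removed README
uri = URI(File.join(url, 'robots.txt'))     # => https://www.ebay.com/robots.txt
lines = Net::HTTP.get_response(uri).body.split("\n").map(&:to_s)

puts lines.first(5)
```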
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ReadRobotstxt
  version: !ruby/object:Gem::Version
- version: 0.2.0
+ version: 0.3.0
  platform: ruby
  authors:
  - Michael-Meade
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2025-10-29 00:00:00.000000000 Z
+ date: 2025-11-01 00:00:00.000000000 Z
  dependencies: []
  description: Read Robots.txt files
  email:
@@ -17,13 +17,8 @@ executables: []
  extensions: []
  extra_rdoc_files: []
  files:
- - README.md
- - Rakefile
- - ReadRobotstxt-0.1.0.gem
  - lib/Robots.rb
  - lib/Robots/version.rb
- - robots.rb
- - sig/Robots.rbs
  homepage:
  licenses: []
  metadata: {}
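Besides the version and build-date bump, the packaged file list shrinks to just the two library files: the README, Rakefile, leftover ReadRobotstxt-0.1.0.gem, the robots.rb example script, and the RBS signature are no longer shipped (their deletions follow below). The gemspec itself is not part of the published gem, but a spec whose files glob is restricted to lib/ would produce exactly this list; a hedged sketch, with illustrative values for anything not visible in the metadata:

```ruby
# Illustrative gemspec (the real ReadRobotstxt.gemspec is not included in the
# published gem), showing a files glob that yields the trimmed 0.3.0 file list.
Gem::Specification.new do |spec|
  spec.name        = 'ReadRobotstxt'
  spec.version     = '0.3.0'
  spec.authors     = ['Michael-Meade']
  spec.summary     = 'Read Robots.txt files'   # summary not shown in the metadata; assumed
  spec.description = 'Read Robots.txt files'
  spec.bindir      = 'exe'
  spec.files       = Dir['lib/**/*.rb']        # => lib/Robots.rb, lib/Robots/version.rb
end
```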
data/README.md DELETED
@@ -1,55 +0,0 @@
- # Robots
- Get robots.txt information from a site.
-
- ## Installation
-
- ```ruby
- gem install ReadRobotstxt
- ```
-
-
- ## Usage
-
- ```ruby
- require './lib/robots'
- u = Robots::Url.new('https://www.ebay.com')
- u.allow
- u.disallow
- u.disallow_removed
- u.allow_removed
- ```
-
-
- ```ruby
- require 'Robots'
-
- u = Robots::Url.new('https://www.ebay.com')
- u.allow
- u.disallow
- u.disallow_removed
- u.allow_removed
-
-
- ```
-
- ## Output
-
- ```ruby
- Allow: /urw/*/product-reviews/
- Allow: /ads.txt
- Allow: /b/adidas-Yeezy-Sneakers-for-Men/15709/bn_86578781?*_trkparms=*pageci:*|parentrq:*iid:0
- Allow: /b/Collectible-Sneakers/bn_7000259435?*_trkparms=*pageci:*|parentrq:*iid:0
- Allow: /b/Jordan-Sneakers-for-Men/15709/bn_96541848?*_trkparms=*pageci:*|parentrq:*iid:0
- Allow: /b/New-Balance-Sneakers-for-Men/15709/bn_58747?*_trkparms=*pageci:*|parentrq:*iid:0
- Allow: /b/Sneakers-for-Men/15709/bn_57918?*_trkparms=*pageci:*|parentrq:*iid:0
- Allow: /b/*?*_mwBanner
- Allow: /b/*?iid=*&var=
- Allow: /b/*?iid=*&chn=ps
- Allow: /b/*?iid=*&var=*&chn=ps
- Allow: /sch/ebayadvsearch
- Allow: /sch/allcategories/
- Allow: /sch/i.html?*&mkcid=2
- Allow: /sch/i.html*_sop=12
- Allow: /signin/$
- Allow: /urw/*/product-reviews/
- ```
data/Rakefile DELETED
@@ -1,4 +0,0 @@
- # frozen_string_literal: true
-
- require 'bundler/gem_tasks'
- task default: %i[]
data/ReadRobotstxt-0.1.0.gem DELETED
Binary file
data/robots.rb DELETED
@@ -1,7 +0,0 @@
- require 'Robots'
-
- u = Robots::Url.new('https://www.ebay.com')
- u.allow
- u.disallow
- u.disallow_removed
- u.allow_removed
data/sig/Robots.rbs DELETED
@@ -1,4 +0,0 @@
- module Robots
- VERSION: String
- # See the writing guide of rbs: https://github.com/ruby/rbs#guides
- end