reddit_bot 1.7.7 → 1.7.8

This diff compares the contents of two publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
Files changed (71)
  1. checksums.yaml +4 -4
  2. data/lib/reddit_bot.rb +3 -1
  3. data/reddit_bot.gemspec +9 -11
  4. metadata +9 -77
  5. data/.gitignore +0 -5
  6. data/Gemfile +0 -5
  7. data/README.md +0 -101
  8. data/Rakefile +0 -6
  9. data/examples/.bashrc +0 -2
  10. data/examples/.gitignore +0 -2
  11. data/examples/Gemfile.lock +0 -17
  12. data/examples/boilerplate.rb +0 -12
  13. data/examples/councilofricks/Gemfile +0 -4
  14. data/examples/councilofricks/Gemfile.lock +0 -17
  15. data/examples/councilofricks/main.rb +0 -58
  16. data/examples/cptflairbot3/.bashrc +0 -1
  17. data/examples/cptflairbot3/Code.gs +0 -13
  18. data/examples/cptflairbot3/Gemfile +0 -5
  19. data/examples/cptflairbot3/Gemfile.lock +0 -74
  20. data/examples/cptflairbot3/app.js +0 -40
  21. data/examples/cptflairbot3/casual/casualpokemontrades.htm +0 -910
  22. data/examples/cptflairbot3/casual/script.js +0 -55
  23. data/examples/cptflairbot3/casual/style.css +0 -1099
  24. data/examples/cptflairbot3/log.htm +0 -1
  25. data/examples/cptflairbot3/main.rb +0 -62
  26. data/examples/cptflairbot3/package.json +0 -6
  27. data/examples/cptflairbot3/pubsub.rb +0 -30
  28. data/examples/devflairbot/Gemfile +0 -6
  29. data/examples/devflairbot/Gemfile.lock +0 -74
  30. data/examples/devflairbot/main.rb +0 -81
  31. data/examples/dut/Gemfile +0 -4
  32. data/examples/dut/Gemfile.lock +0 -24
  33. data/examples/dut/main.rb +0 -41
  34. data/examples/get_dimensions.rb +0 -212
  35. data/examples/iostroubleshooting/Gemfile +0 -5
  36. data/examples/iostroubleshooting/Gemfile.lock +0 -16
  37. data/examples/iostroubleshooting/main.rb +0 -36
  38. data/examples/johnnymarr/Gemfile +0 -3
  39. data/examples/johnnymarr/Gemfile.lock +0 -17
  40. data/examples/johnnymarr/main.rb +0 -54
  41. data/examples/johnnymarr/twitter.rb +0 -80
  42. data/examples/largeimages/Gemfile +0 -11
  43. data/examples/largeimages/Gemfile.lock +0 -105
  44. data/examples/largeimages/main.rb +0 -173
  45. data/examples/largeimagesreview/Gemfile +0 -4
  46. data/examples/largeimagesreview/Gemfile.lock +0 -15
  47. data/examples/largeimagesreview/main.rb +0 -43
  48. data/examples/mlgtv/Gemfile +0 -4
  49. data/examples/mlgtv/Gemfile.lock +0 -23
  50. data/examples/mlgtv/channels.txt +0 -127
  51. data/examples/mlgtv/main.rb +0 -160
  52. data/examples/net_http_utils.rb +0 -148
  53. data/examples/oneplus/Gemfile +0 -5
  54. data/examples/oneplus/Gemfile.lock +0 -26
  55. data/examples/oneplus/main.rb +0 -43
  56. data/examples/realtimeww2/.bashrc +0 -1
  57. data/examples/realtimeww2/Gemfile +0 -3
  58. data/examples/realtimeww2/Gemfile.lock +0 -17
  59. data/examples/realtimeww2/main.rb +0 -129
  60. data/examples/sexypizza/Gemfile +0 -3
  61. data/examples/sexypizza/Gemfile.lock +0 -15
  62. data/examples/sexypizza/main.rb +0 -33
  63. data/examples/unisa/Gemfile +0 -4
  64. data/examples/unisa/Gemfile.lock +0 -24
  65. data/examples/unisa/main.rb +0 -41
  66. data/examples/wallpaper/Gemfile +0 -5
  67. data/examples/wallpaper/Gemfile.lock +0 -34
  68. data/examples/wallpaper/main.rb +0 -27
  69. data/examples/yayornay/Gemfile +0 -3
  70. data/examples/yayornay/Gemfile.lock +0 -15
  71. data/examples/yayornay/main.rb +0 -33
@@ -1,4 +0,0 @@
- source "https://rubygems.org"
-
- gem "reddit_bot", "1.1.5"
- # gem "mll"
@@ -1,15 +0,0 @@
- GEM
- remote: https://rubygems.org/
- specs:
- json (1.8.3)
- reddit_bot (1.1.5)
- json
-
- PLATFORMS
- ruby
-
- DEPENDENCIES
- reddit_bot (= 1.1.5)
-
- BUNDLED WITH
- 1.12.3
@@ -1,43 +0,0 @@
- cache = lambda do |&block|
- require "yaml"
- next YAML.load File.read "cache.yaml" if File.exist? "cache.yaml"
- block.call.tap do |data|
- File.write "cache.yaml", YAML.dump(data)
- end
- end
-
-
- require_relative "../boilerplate"
- BOT = RedditBot::Bot.new YAML.load File.read "secrets.yaml"
-
- SUBREDDIT = "largeimages"
-
- table = cache.call do
- BOT.json(:get, "/r/#{SUBREDDIT}/about/log", [["limit", ENV["LIMIT"] || 500]])["data"]["children"].map do |child|
- fail child unless child["kind"] == "modaction"
- next unless %w{ removelink approvelink }.include? child["data"]["action"]
- title = BOT.json(:get, "/api/info", [["id", child["data"]["target_fullname"]]])["data"]["children"][0]["data"]["title"]
- [child["data"]["action"], title[/(?<=^\[)\d+x\d+/], title[/[^\/]+$/]]
- end.compact
- end
-
- report = table.group_by(&:last).sort_by{ |_, group| -group.size }.map do |sub, group|
- good = (group.group_by(&:first)["approvelink"] || []).size
- [
- sub, "Total: #{group.size}",
- "Quality: #{good * 100 / group.size}%",
- group.sort_by do |_, resolution, |
- x, y = resolution.scan(/\d+/).map(&:to_i)
- x * y
- end.map do |status, |
- {"approvelink"=>?✅, "removelink"=>?⛔}[status] || ??
- end.join,
- ]
- end
-
- widths = report.transpose.map{ |column| column.map(&:size).max }
- report.each do |row|
- puts [row, widths, %i{ center ljust ljust ljust }].transpose.map{ |string, width, alignment|
- " #{string.send(alignment, width)} "
- }.join
- end
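
The removed script above wraps its moderation-log query in a small memoize-to-disk helper: the block's result is computed once, dumped to cache.yaml, and reloaded on later runs. A minimal standalone sketch of that pattern, with an illustrative path and block body that are not part of the gem:

```ruby
require "yaml"

# Memoize an expensive block to a YAML file: reuse the cached result if the
# file already exists, otherwise compute it, persist it, and return it.
cache = lambda do |path, &block|
  next YAML.load_file path if File.exist? path
  block.call.tap { |data| File.write path, YAML.dump(data) }
end

result = cache.call("cache.yaml") { {"computed_at" => Time.now.to_s} }
p result
```

The original hard-codes the file name and uses `YAML.load File.read`; the shape of the helper is the same.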
@@ -1,4 +0,0 @@
- source "https://rubygems.org"
-
- gem "reddit_bot", "~>1.3.0"
- gem "nethttputils", git: "git@github.com:Nakilon/nethttputils.git", tag: "v0.0.4.1"
@@ -1,23 +0,0 @@
- GIT
- remote: git@github.com:Nakilon/nethttputils.git
- revision: 84ac32db6d44b68f5bd6e2f1ce8698fc5e2ad968
- tag: v0.0.4.1
- specs:
- nethttputils (0.0.4.1)
-
- GEM
- remote: https://rubygems.org/
- specs:
- json (2.1.0)
- reddit_bot (1.3.0)
- json
-
- PLATFORMS
- ruby
-
- DEPENDENCIES
- nethttputils!
- reddit_bot (~> 1.3.0)
-
- BUNDLED WITH
- 1.16.0
@@ -1,127 +0,0 @@
- Ricky
- ACHES
- Lacefield
- Clayster
- Enable
- Zoomaa
- Attach
- TheFEARS
- MiRx1
- SaintsRF
- StuDyy
- SpaceLyTV
- NAMELESS
- Scump
- FORMAL
- Crimsix
- Karma
- Loony
- Slacked
- Octane
- MJChino
- Diabolic_TV
- ImTheIvy
- Senderxz
- Jkap
- John
- SlasheRAL
- Apathy
- ColtHavok
- MikeSwarley
- Parasite
- TyreeLegal
- Silly
- Blfire
- methodz
- TwiZzyTV
- Mochila
- Remy
- Xotic16
- AquA
- Faccento
- Nagafen
- Tylerfelo
- TheoryCoD
- ColeChanTV
- happyy97
- goonjar
- Burns
- Dedo
- Neslo
- TeeCM
- K1lla93
- NeLsoNNaTeR
- ProoFy
- Whea7s
- MBoZe
- Merk
- Nadeshot
- ReeP
- Sharp
- TeePee
- Braaain2015
- Nolsonn
- QwiKeRTHaNu
- Zedenyer1
- Jurd
- Tommey
- Swanny
- MadCatEU
- Rated_EU1
- BsportJoshh
- VortexSA
- Sy_Vortex
- TheMarkyB
- Peatie95
- urbandm
- TreiZer0
- iDqvee
- Tojor
- MethodZ_TV
- Gotaga
- WailersWL
- TCM_Moose
- skrapzq
- Reedy
- fighta71
- Swiftazor
- NakeeeZe
- BeastnGOD
- Dizmull
- MitchBuZZO
- YKEminence
- BacabecNZ
- Zeuss_Gaming
- Hopeyy
- GuydraCOD
- mattmrx
- Maven
- CouRageJD
- Revan
- BriceyHD
- Benson
- PHILWHI7
- CallofDuty
- MLGcod
- MLG
- mlgbravo
- MLGcharlie
- MLG_Delta
- MLG_FR
- MLG_German
- MLG_ITL
- MES
- Multiplay_CoD
- UMGEvents
- GfinityTV
- WorldGaming
- FemaleProLeague
- Arcitys
- Prestinni
- Maux
- priestahh
- Vilesyder
- benbance
- JordonGeneral
- dreeall
- CESNLive
@@ -1,160 +0,0 @@
- require_relative File.join "../boilerplate"
-
- BOT = RedditBot::Bot.new YAML.load_file "secrets.yaml"
-
- SUBREDDIT = "codcompetitive"
-
- loop do
- Hearthbeat.beat "u_OpTicNaDeBoT_r_CoDCompetitive", 70 unless Gem::Platform.local.os == "darwin"
- catch :loop do
-
- text = " Live Streams\n\n" + [].tap do |list|
-
- throw :loop unless statuses = JSON.parse( begin
- NetHTTPUtils.request_data("http://streamapi.majorleaguegaming.com/service/streams/all")[/\{.+\}/m]
- rescue NetHTTPUtils::Error => e
- fail unless e.code == 408
- puts 408
- sleep 60
- retry
- end )["data"]["items"]
- games = JSON.parse(
- NetHTTPUtils.request_data("http://www.majorleaguegaming.com/api/games/all")[/\{.+\}/m]
- )["data"]["items"]
- begin
- JSON.parse begin
- NetHTTPUtils.request_data("http://www.majorleaguegaming.com/api/channels/all?fields=name,url,tags,stream_name,game_id")
- rescue NetHTTPUtils::Error => e
- fail unless e.code == 404
- puts 404
- sleep 60
- retry
- end
- rescue JSON::ParserError
- puts "JSON::ParserError"
- sleep 60
- retry
- end["data"]["items"].each do |item1|
- next unless item1["tags"].include? "COD Pro League"
- status = statuses.find{ |item2| item1["stream_name"] == item2["stream_name"] }
- next unless status && status["status"] > 0
- game = games.find{ |game| game["id"] == item1["game_id"] }
- list << "* [](#mlg) [](##{
- ["?", "live", "replay"][status["status"]]
- }) #{
- "[](##{ {
- "Call of Duty: Modern Warfare 2" => "codmw2",
- "Call of Duty: Modern Warfare 3" => "codmw3",
- "Call of Duty: Black Ops" => "codbo12",
- "Call of Duty: Black Ops II" => "codbo2",
- "Call of Duty: Black Ops III" => "codbo3",
- "Call of Duty: Advanced Warfare" => "codaw",
- "Call of Duty: Ghosts" => "codghosts2",
- "Call of Duty: Infinite Warfare" => "codiw",
- }[game["name"]] }) " if game
- }[**#{
- item1["name"]
- }**](#{
- item1["url"]
- })"
- end
-
- # to update access_token:
- # 0. see 'client_id' here https://www.twitch.tv/settings/connections and 'client_secret' from local ./readme file
- # 1. get 'code' by visiting in browser: https://api.twitch.tv/kraken/oauth2/authorize?response_type=code&client_id=*******&redirect_uri=http://www.example.com/unused/redirect/uri&scope=channel_read channel_feed_read
- # 2. NetHTTPUtils.request_data("https://api.twitch.tv/kraken/oauth2/token", :post, form: {client_id: "*******", client_secret: "*****", grant_type: "authorization_code", redirect_uri: "http://www.example.com/unused/redirect/uri", code: "*******"})
- twitch = lambda do |url|
- max = 1000
- data_key = "top"
- next_key = "_links"
-
- request = lambda do |url, acc|
- uri = URI.parse url
- query = Hash[URI.decode_www_form uri.query || ""]
- # query.merge!({ "limit" => max }) if max
- uri.query = URI.encode_www_form query.merge( {
- "access_token" => File.read("twitch.token").strip,
- "client_id" => File.read("client.id").strip,
- } )
- json = JSON.parse NetHTTPUtils.request_data uri.to_s
- unless json[data_key]
- pp json
- fail
- end
- acc = acc + json[data_key]
- next acc.take max if max && max <= acc.size
- request[json[next_key]["next"], acc]
- end
-
- request[url, []]
- end
- # ? absent on twitch ? "Call of Duty: Modern Warfare 2" => "codmw2"
- # t = twitch["https://api.twitch.tv/kraken/games/top?limit=100"].map{ |hash| fail hash.keys.to_s unless hash.keys == %w{ game viewers channels }; hash.values.first["name"] }
- # pp t.grep("/call of duty/i")
- # pp t.grep("/warfare/i")
- # ? absent in css ? "Call of Duty: United Offensive"
- {
- "Call of Duty: Infinite Warfare" => "codiw",
- "Call of Duty: Modern Warfare Remastered" => "cod4",
- "Call of Duty 4: Modern Warfare" => "cod4",
- "Call of Duty: Modern Warfare 3" => "codmw3",
- "Call of Duty: Black Ops" => "codbo12",
- "Call of Duty: Black Ops II" => "codbo2",
- "Call of Duty: Black Ops III" => "codbo3",
- "Call of Duty: Advanced Warfare" => "codaw",
- "Call of Duty: Ghosts" => "codghosts2",
- "Call of Duty: World at War" => "codwaw",
- "Call of Duty: WWII" => "codwwii",
- "Modern Warfare 2" => "codmw2",
- }.each do |game, css|
- (begin
- require "cgi"
- begin
- t = NetHTTPUtils.get_response "https://api.twitch.tv/kraken/streams?game=#{CGI::escape game}&access_token=#{File.read("twitch.token").strip}&client_id=#{File.read("client.id").strip}&channel=#{File.read("channels.txt").split.join ?,}"
- end while t.code == 500
- JSON.parse t.body
- rescue JSON::ParserError
- puts "JSON::ParserError"
- sleep 60
- retry
- end["streams"] || []).each do |channel|
- list << "* [](#twitch) [](#live) #{
- "[](##{css}) "
- }[**#{
- channel["channel"]["display_name"]
- }**](#{
- channel["channel"]["url"]
- })"
- end
- end
-
- end.join(" \n") + "\n"
-
- settings = BOT.json(:get, "/r/#{SUBREDDIT}/about/edit")["data"]
- # https://github.com/praw-dev/praw/blob/c45e5f6ca0c5cd9968b51301989eb82740f8dc85/praw/__init__.py#L1592
- settings.store "sr", settings.delete("subreddit_id")
- settings.store "lang", settings.delete("language")
- settings.store "link_type", settings.delete("content_options")
- settings.store "type", settings.delete("subreddit_type")
- settings.store "header-title", settings.delete("header_hover_text") || ""
- settings["domain"] ||= ""
- settings["submit_link_label"] ||= ""
- settings["submit_text_label"] ||= ""
- settings["allow_top"] = settings["allow_top"]
- settings.delete "default_set"
-
- prefix, postfix = CGI.unescapeHTML(settings["description"]).split(/(?<=\n#####)\s*Live Streams.+?(?=\n#+)/im)
- unless postfix
- puts "!!! can't parse sidebar !!!"
- throw :loop
- end
- next puts "nothing to change" if prefix + text + postfix == CGI.unescapeHTML(settings["description"])
-
- puts "updating sidebar..."
- settings["description"] = prefix + text + postfix
- _ = BOT.json :post, "/api/site_admin", settings.to_a
- fail _.inspect if _ != {"json"=>{"errors"=>[]}} && !(_["json"]["errors"].map(&:first) - ["BAD_CAPTCHA"]).empty?
-
- end
- sleep 300
- end
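
The removed mlgtv script pages through the Twitch API with a recursive lambda that accumulates each page's items and follows the `_links.next` URL until a cap is reached. A reduced sketch of that follow-the-next-link pagination; the endpoint, JSON keys, and `fetch_json` helper below are placeholders rather than the MLG/Twitch specifics:

```ruby
require "json"
require "net/http"

# Placeholder fetcher; the original script uses NetHTTPUtils.request_data.
fetch_json = lambda { |url| JSON.parse Net::HTTP.get URI(url) }

# Accumulate "items" from each page and recurse on the "next" link
# until no link remains or the cap is reached.
paginate = lambda do |url, acc = [], cap: 1000|
  page = fetch_json.call url
  acc += page.fetch("items", [])
  next_url = page.dig("links", "next")
  return acc.take(cap) if next_url.nil? || acc.size >= cap
  paginate.call next_url, acc, cap: cap
end

# streams = paginate.call "https://example.com/api/streams"
```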
@@ -1,148 +0,0 @@
- # TODO deprecate in favor of the gem nethttputils
-
- require "net/http"
- require "openssl"
-
- require "logger"
-
-
- module NetHTTPUtils
- class << self
-
- attr_accessor :logger
-
- # private?
- def get_response url, mtd = :get, form: {}, header: [], auth: nil, timeout: 30, patch_request: nil, &block
- # form = Hash[form.map{ |k, v| [k.to_s, v] }]
- uri = URI.parse url
- cookies = {}
- prepare_request = lambda do |uri|
- case mtd
- when :get ; Net::HTTP::Get
- when :post ; Net::HTTP::Post
- when :put ; Net::HTTP::Put
- when :delete ; Net::HTTP::Delete
- else ; raise "unknown method #{mtd}"
- end.new(uri).tap do |request| # somehow Get eats even raw url, not URI object
- patch_request.call uri, form, request if patch_request
- request.basic_auth *auth if auth
- header.each{ |k, v| request[k] = v }
- request["cookie"] = [*request["cookie"], cookies.map{ |k, v| "#{k}=#{v}" }].join "; " unless cookies.empty?
- request.set_form_data form unless form.empty?
- stack = caller.reverse.map do |level|
- /((?:[^\/:]+\/)?[^\/:]+):([^:]+)/.match(level).captures
- end.chunk(&:first).map do |file, group|
- "#{file}:#{group.map(&:last).chunk{|_|_}.map(&:first).join(",")}"
- end
- logger.info request.path
- logger.debug request.each_header.to_a.to_s
- logger.debug stack.join " -> "
- logger.debug request
- end
- end
- request = prepare_request[uri]
- start_http = lambda do |uri|
- begin
- Net::HTTP.start(
- uri.host, uri.port,
- use_ssl: uri.scheme == "https",
- verify_mode: OpenSSL::SSL::VERIFY_NONE,
- # read_timeout: 5,
- ).tap do |http|
- http.read_timeout = timeout #if timeout
- http.open_timeout = timeout #if timeout
- http.set_debug_output STDERR if logger.level == Logger::DEBUG # use `logger.debug?`?
- end
- rescue Errno::ECONNREFUSED => e
- e.message.concat " to #{uri}" # puts "#{e} to #{uri}"
- raise e
- rescue Errno::EHOSTUNREACH, Errno::ENETUNREACH, Errno::ECONNRESET, SocketError, OpenSSL::SSL::SSLError => e
- logger.warn "retrying in 5 seconds because of #{e.class}"
- sleep 5
- retry
- rescue Errno::ETIMEDOUT
- logger.warn "ETIMEDOUT, retrying in 5 minutes"
- sleep 300
- retry
- end
- end
- http = start_http[uri]
- do_request = lambda do |request|
- response = begin
- http.request request, &block
- rescue Errno::ECONNRESET, Errno::ECONNREFUSED, Net::ReadTimeout, Net::OpenTimeout, Zlib::BufError, OpenSSL::SSL::SSLError => e
- logger.error "retrying in 30 seconds because of #{e.class} at: #{request.uri}"
- sleep 30
- retry
- end
- response.to_hash.fetch("set-cookie", []).each{ |c| k, v = c.split(?=); cookies[k] = v[/[^;]+/] }
- case response.code
- when /\A3\d\d$/
- logger.info "redirect: #{response["location"]}"
- new_uri = URI.join(request.uri, response["location"])
- new_host = new_uri.host
- if http.address != new_host ||
- http.port != new_uri.port ||
- http.use_ssl? != (new_uri.scheme == "https")
- logger.debug "changing host from '#{http.address}' to '#{new_host}'"
- http = start_http[new_uri]
- end
- do_request.call prepare_request[new_uri]
- when "404"
- logger.error "404 at #{request.method} #{request.uri} with body: #{
- response.body.is_a?(Net::ReadAdapter) ? "impossible to reread Net::ReadAdapter -- check the IO you've used in block form" : response.body.tap do |body|
- body.replace body.strip.gsub(/<[^>]*>/, "") if body["<html>"]
- end.inspect
- }"
- response
- when /\A50\d$/
- logger.error "#{response.code} at #{request.method} #{request.uri} with body: #{response.body.inspect}"
- response
- else
- logger.info "code #{response.code} at #{request.method} #{request.uri}#{
- " and so #{url}" if request.uri.to_s != url
- } from #{
- [__FILE__, caller.map{ |i| i[/\d+/] }].join ?:
- } with body: #{
- response.body.tap do |body|
- body.replace body.strip.gsub(/<[^>]*>/, "") if body["<html>"]
- end.inspect
- }" unless response.code.start_with? "20"
- response
- end
- end
- do_request[request].tap do |response|
- cookies.each{ |k, v| response.add_field "Set-Cookie", "#{k}=#{v};" }
- logger.debug response.to_hash
- end
- end
-
- def request_data *args
- response = get_response *args
- throw :"404" if "404" == response.code
- throw :"500" if "500" == response.code
- response.body
- end
-
- end
- self.logger = Logger.new STDOUT
- self.logger.level = ENV["LOGLEVEL_#{name}"] ? Logger.const_get(ENV["LOGLEVEL_#{name}"]) : Logger::WARN
- self.logger.formatter = lambda do |severity, datetime, progname, msg|
- "#{severity.to_s[0]} #{datetime.strftime "%y%m%d %H%M%S"} : #{name} : #{msg}\n"
- end
- end
-
-
- if $0 == __FILE__
- print "self testing... "
-
- fail unless NetHTTPUtils.request_data("http://httpstat.us/200") == "200 OK"
- fail unless NetHTTPUtils.get_response("http://httpstat.us/404").body == "404 Not Found"
- catch(:"404"){ fail NetHTTPUtils.request_data "http://httpstat.us/404" }
- # TODO raise?
- fail unless NetHTTPUtils.request_data("http://httpstat.us/400") == "400 Bad Request"
- fail unless NetHTTPUtils.get_response("http://httpstat.us/500").body == "500 Internal Server Error"
- catch(:"500"){ fail NetHTTPUtils.request_data "http://httpstat.us/500" }
-
- puts "OK #{__FILE__}"
- end
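
The header comment of this vendored helper already marks it for deprecation in favor of the published nethttputils gem, and other removed examples in this diff depend on that gem directly. A minimal sketch of the gem-based equivalent, assuming the gem keeps the same `NetHTTPUtils.request_data` / `NetHTTPUtils.get_response` interface shown here:

```ruby
# Gemfile: gem "nethttputils"
require "nethttputils"

# Fetch a response body, as the removed examples do for the MLG and Twitch endpoints.
body = NetHTTPUtils.request_data "http://httpstat.us/200"

# Inspect the full response when the status code matters.
response = NetHTTPUtils.get_response "http://httpstat.us/404"
puts response.code
```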