nethttputils 0.2.5.1 → 0.3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/nethttputils.rb +260 -222
- data/nethttputils.gemspec +1 -1
- metadata +2 -2
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz:
- data.tar.gz:
+ metadata.gz: 9a9df743f10c31c23bbbcc076750dcea63f50133
+ data.tar.gz: 503c63e39858d28602a042701187a4f08404d349
  SHA512:
- metadata.gz:
- data.tar.gz:
+ metadata.gz: 6d88ded5499b3aaddfbd187eb185099d6de32d2e78f941ab6f95ed3db7a4a5f2d5fe4b134c267bd85cbc2199aba15fec6873f2fc8b13eed184e1d4fe566d1acf
+ data.tar.gz: '006529bc8770f5c149572528fe0050a69755f0b2db8efbb76ec2e2fb324e198ce4f7cf176d2e04f9d6a461d1a3d07b74badc85ee96765c01889a86e7f22b2ddd'
data/lib/nethttputils.rb CHANGED

@@ -30,244 +30,262 @@ module NetHTTPUtils
  gsub(/<[^>]*>/, "").split(?\n).map(&:strip).reject(&:empty?).join(?\n)
  end

-
-
+ def start_http url, timeout = 30, max_start_http_retry_delay = 3600
+ fail if url.is_a? URI::HTTP
  uri = url
  uri = URI.parse begin
  URI url
  url
  rescue URI::InvalidURIError
  URI.escape url
- end
+ end
+ delay = 5
+ begin
+ Net::HTTP.start(
+ uri.host, uri.port,
+ use_ssl: uri.scheme == "https",
+ verify_mode: OpenSSL::SSL::VERIFY_NONE,
+ **({open_timeout: timeout}), # if timeout
+ **({read_timeout: timeout}), # if timeout
+ ) do |http|
+ # http.open_timeout = timeout # seems like when opening hangs, this line in unreachable
+ # http.read_timeout = timeout
+ http.set_debug_output( Object.new.tap do |obj|
+ obj.instance_eval do
+ def << msg
+ @@buffer ||= "[Net::HTTP debug] "
+ @@buffer.concat msg
+ @@buffer = @@buffer[0...997] + "..." if @@buffer.size > 500
+ return unless @@buffer.end_with? ?\n
+ NetHTTPUtils.logger.debug @@buffer.sub ?\n, " "
+ @@buffer = nil
+ end
+ end
+ end ) if logger.level == Logger::DEBUG # use `logger.debug?`?
+ http
+ end
+ rescue Errno::ECONNREFUSED => e
+ if max_start_http_retry_delay < delay *= 2
+ e.message.concat " to #{uri}"
+ raise
+ end
+ logger.warn "retrying in #{delay} seconds because of #{e.class} '#{e.message}'"
+ sleep delay
+ retry
+ rescue Errno::EHOSTUNREACH, Errno::ENETUNREACH, Errno::ECONNRESET => e
+ logger.warn "retrying in 5 seconds because of #{e.class} '#{e.message}'"
+ sleep 5
+ retry
+ rescue SocketError => e
+ if max_start_http_retry_delay < delay *= 2
+ e.message.concat " to #{uri}"
+ raise e
+ end
+ logger.warn "retrying in #{delay} seconds because of #{e.class} '#{e.message}' at: #{uri}"
+ sleep delay
+ retry
+ rescue Errno::ETIMEDOUT, Net::OpenTimeout => e
+ raise if max_start_http_retry_delay < delay *= 2
+ logger.warn "retrying in #{delay} seconds because of #{e.class} '#{e.message}' at: #{uri}"
+ sleep delay
+ retry
+ rescue OpenSSL::SSL::SSLError => e
+ raise if max_start_http_retry_delay < delay *= 2
+ logger.error "retrying in #{delay} seconds because of #{e.class} '#{e.message}' at: #{uri}"
+ sleep delay
+ retry
+ end.tap do |http|
+ http.instance_variable_set "@uri", uri
+ http.define_singleton_method :read do |mtd = :GET, type = :form, form: {}, header: {}, auth: nil, timeout: 30,
+ max_read_retry_delay: 3600,
+ patch_request: nil,
+ &block|

-
- # we can't just merge because URI fails to parse such queries as "/?1"
+ logger = NetHTTPUtils.logger

-
-
- prepare_request = lambda do |uri, mtd = :GET, form = {}|
- case mtd.upcase
- when :GET ; Net::HTTP::Get
- when :POST ; Net::HTTP::Post
- when :PUT ; Net::HTTP::Put
- when :DELETE ; Net::HTTP::Delete
- when :PATCH ; Net::HTTP::Patch
- else ; raise "unknown method '#{mtd}'"
- end.new(uri).tap do |request| # somehow Get eats even raw url, not URI object
- patch_request.call uri, form, request if patch_request
- request.basic_auth *auth if auth
- # pp Object.instance_method(:method).bind(request).call(:set_form).source_location
- if (mtd == :POST || mtd == :PATCH) && !form.empty?
- case type
- when :json ; request.body = JSON.dump form
- request.content_type = "application/json"
- when :form ; if form.any?{ |k, v| v.respond_to? :to_path }
- request.set_form form, "multipart/form-data"
- else
- request.set_form_data form
- request.content_type = "application/x-www-form-urlencoded;charset=UTF-8"
- end
- else ; raise "unknown content-type '#{type}'"
- end
- end
- header.each{ |k, v| request[k.to_s] = v }
- request["cookie"] = [*request["cookie"], cookies.map{ |k, v| "#{k}=#{v}" }].join "; " unless cookies.empty?
+ logger.warn "Warning: query params included in `url` argument are discarded because `:form` isn't empty" if uri.query && !form.empty?
+ # we can't just merge because URI fails to parse such queries as "/?1"

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- uri.host, uri.port,
- use_ssl: uri.scheme == "https",
- verify_mode: OpenSSL::SSL::VERIFY_NONE,
- **({open_timeout: timeout}), # if timeout
- **({read_timeout: timeout}), # if timeout
- ) do |http|
- # http.open_timeout = timeout # seems like when opening hangs, this line in unreachable
- # http.read_timeout = timeout
- http.set_debug_output( Object.new.tap do |obj|
- obj.instance_eval do
- def << msg
- @@buffer ||= "[Net::HTTP debug] "
- @@buffer.concat msg
- @@buffer = @@buffer[0...997] + "..." if @@buffer.size > 500
- return unless @@buffer.end_with? ?\n
- NetHTTPUtils.logger.debug @@buffer.sub ?\n, " "
- @@buffer = nil
+ uri.query = URI.encode_www_form form if :GET == (mtd = mtd.upcase) && !form.empty?
+ cookies = {}
+ prepare_request = lambda do |uri, mtd = :GET, form = {}|
+ case mtd.upcase
+ when :GET ; Net::HTTP::Get
+ when :POST ; Net::HTTP::Post
+ when :PUT ; Net::HTTP::Put
+ when :DELETE ; Net::HTTP::Delete
+ when :PATCH ; Net::HTTP::Patch
+ else ; raise "unknown method '#{mtd}'"
+ end.new(uri).tap do |request| # somehow Get eats even raw url, not URI object
+ patch_request.call uri, form, request if patch_request
+ # p Object.instance_method(:method).bind(request).call(:basic_auth).source_location
+ # p Object.instance_method(:method).bind(request).call(:set_form).source_location
+ # request.basic_auth *p(auth.map(&URI.method(:escape))) if auth
+ request.basic_auth *auth if auth
+ if (mtd == :POST || mtd == :PATCH) && !form.empty?
+ case type
+ when :json ; request.body = JSON.dump form
+ request.content_type = "application/json"
+ when :form ; if form.any?{ |k, v| v.respond_to? :to_path }
+ request.set_form form, "multipart/form-data"
+ else
+ request.set_form_data form
+ request.content_type = "application/x-www-form-urlencoded;charset=UTF-8"
+ end
+ else ; raise "unknown content-type '#{type}'"
  end
  end
-
-
- end
- rescue Errno::ECONNREFUSED => e
- if max_econnrefused_retry_delay < delay *= 2
- e.message.concat " to #{uri}"
- raise
- end
- logger.warn "retrying in #{delay} seconds because of #{e.class} '#{e.message}'"
- sleep delay
- retry
- rescue Errno::EHOSTUNREACH, Errno::ENETUNREACH, Errno::ECONNRESET => e
- logger.warn "retrying in 5 seconds because of #{e.class} '#{e.message}'"
- sleep 5
- retry
- rescue SocketError => e
- if max_socketerror_retry_delay < delay *= 2
- e.message.concat " to #{uri}"
- raise e
- end
- logger.warn "retrying in #{delay} seconds because of #{e.class} '#{e.message}' at: #{uri}"
- sleep delay
- retry
- rescue Errno::ETIMEDOUT, Net::OpenTimeout => e
- raise if max_timeout_retry_delay < delay *= 2
- logger.warn "retrying in #{delay} seconds because of #{e.class} '#{e.message}' at: #{uri}"
- sleep delay
- retry
- rescue OpenSSL::SSL::SSLError => e
- raise if max_sslerror_retry_delay < delay *= 2
- logger.error "retrying in #{delay} seconds because of #{e.class} '#{e.message}' at: #{uri}"
- sleep delay
- retry
- end
- end
- http = start_http[uri]
- do_request = lambda do |request|
- delay = 5
- response = begin
- http.request request, &block
- rescue Errno::ECONNREFUSED, Net::ReadTimeout, Net::OpenTimeout, Zlib::BufError, Errno::ECONNRESET, OpenSSL::SSL::SSLError => e
- raise if max_read_retry_delay < delay *= 2
- logger.error "retrying in #{delay} seconds because of #{e.class} '#{e.message}' at: #{request.uri}"
- sleep delay
- retry
- end
- # response.instance_variable_set "@nethttputils_close", http.method(:finish)
- # response.singleton_class.instance_eval{ attr_accessor :nethttputils_socket_to_close }
+ header.each{ |k, v| request[k.to_s] = v }
+ request["cookie"] = [*request["cookie"], cookies.map{ |k, v| "#{k}=#{v}" }].join "; " unless cookies.empty?

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ logger.info "> #{request.class} #{uri.host} #{request.path}"
+ next unless logger.debug?
+ logger.debug "content-type: #{request.content_type}" unless mtd == :GET
+ curl_form = case request.content_type
+ when "application/json" ; "-d #{JSON.dump form} "
+ when "multipart/form-data" ; form.map{ |k, v| "-F \"#{k}=#{v.respond_to?(:to_path) ? "@#{v.to_path}" : v}\" " }.join
+ when "application/x-www-form-urlencoded" ; "-d \"#{URI.encode_www_form form}\" "
+ else ; mtd == :GET ? "" : fail("unknown content-type '#{request.content_type}'")
+ end
+ logger.debug "curl -vsSL --compressed -o /dev/null #{
+ request.each_header.map{ |k, v| "-H \"#{k}: #{v}\" " unless k == "host" }.join
+ }#{curl_form}'#{url.gsub "&", "\\\\&"}#{"?#{uri.query}" if uri.query && uri.query.empty?}'"
+ logger.debug "> header: #{request.each_header.to_a}"
+ logger.debug "> body: #{request.body.inspect.tap{ |body| body[997..-1] = "..." if body.size > 500 }}"
+ # TODO this is buggy -- mixes lines from different files into one line
+ stack = caller.reverse.map do |level|
+ /((?:[^\/:]+\/)?[^\/:]+):([^:]+)/.match(level).captures
+ end.chunk(&:first).map do |file, group|
+ "#{file}:#{group.map(&:last).chunk{|_|_}.map(&:first).join(",")}"
+ end
+ logger.debug stack.join " -> "
+ end
  end
-
+ http = NetHTTPUtils.start_http url, timeout, max_start_http_retry_delay
+ do_request = lambda do |request|
+ delay = 5
+ response = begin
+ http.request request, &block
+ rescue Errno::ECONNREFUSED, Net::ReadTimeout, Net::OpenTimeout, Zlib::BufError, Errno::ECONNRESET, OpenSSL::SSL::SSLError => e
+ raise if max_read_retry_delay < delay *= 2
+ logger.error "retrying in #{delay} seconds because of #{e.class} '#{e.message}' at: #{request.uri}"
+ sleep delay
+ retry
+ end
+ # response.instance_variable_set "@nethttputils_close", http.method(:finish)
+ # response.singleton_class.instance_eval{ attr_accessor :nethttputils_socket_to_close }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ remaining, reset_time, current_timestamp = if response.key? "x-ratelimit-userremaining"
+ logger.debug "x-ratelimit-clientremaining: #{response.fetch("x-ratelimit-clientremaining").to_i}"
+ [
+ response.fetch("x-ratelimit-userremaining").to_i,
+ response.fetch("x-ratelimit-userreset").to_i,
+ response.fetch("x-timer")[/\d+/].to_i,
+ ]
+ elsif response.key? "x-rate-limit-remaining"
+ [
+ response.fetch("x-rate-limit-remaining").to_i,
+ response.fetch("x-rate-limit-reset").to_i,
+ Time.now.to_i,
+ ]
+ end
+ if remaining
+ logger.debug "x-remaining: #{remaining}"
+ if remaining <= 100
+ t = (reset_time - current_timestamp + 1).fdiv remaining
+ logger.warn "x-ratelimit sleep #{t} seconds"
+ sleep t
+ end
+ end
+
+ response.to_hash.fetch("set-cookie", []).each do |c|
+ k, v = c.split(?=)
+ logger.debug "set-cookie: #{k}=#{v[/[^;]+/]}"
+ cookies.store k, v[/[^;]+/]
+ end
+ logger.debug "< header: #{response.to_hash}"
+ case response.code
+ when /\A3\d\d\z/
+ logger.info "redirect: #{response["location"]}"
+ new_uri = URI.join request.uri, URI.escape(response["location"])
+ new_host = new_uri.host
+ if http.address != new_host ||
+ http.port != new_uri.port ||
+ http.use_ssl? != (new_uri.scheme == "https")
+ logger.debug "changing host from '#{http.address}' to '#{new_host}'"
+ # http.finish
+ http = start_http new_uri, timeout, max_start_http_retry_delay
+ end
+ do_request.call prepare_request[new_uri]
+ when "404"
+ logger.error "404 at #{request.method} #{request.uri} with body: #{
+ if response.body.is_a? Net::ReadAdapter
+ "impossible to reread Net::ReadAdapter -- check the IO you've used in block form"
+ elsif response.to_hash["content-type"] == ["image/png"]
+ response.to_hash["content-type"].to_s
+ else
+ response.body.tap do |body|
+ body.replace remove_tags body if body[/<html[> ]/]
+ end.inspect
+ end
+ }"
+ response
+ when "429"
+ logger.error "429 at #{request.method} #{request.uri} with body: #{response.body.inspect}"
+ response
+ when /\A50\d\z/
+ logger.error "#{response.code} at #{request.method} #{request.uri} with body: #{
+ response.body.tap do |body|
+ body.replace remove_tags body if body[/<html[> ]/]
+ end.inspect
+ }"
+ response
+ when /\A20/
+ response
  else
- response.
-
-
+ logger.warn "code #{response.code} at #{request.method} #{request.uri}#{
+ " and so #{url}" if request.uri.to_s != url
+ } from #{
+ [__FILE__, caller.map{ |i| i[/(?<=:)\d+/] }].join ?:
+ }"
+ logger.debug "< body: #{
+ response.body.tap do |body|
+ body.replace remove_tags body if body[/<html[> ]/]
+ end.inspect
+ }"
+ response
  end
-
- response
-
-
-
-
- logger.error "#{response.code} at #{request.method} #{request.uri} with body: #{
- response.body.tap do |body|
- body.replace remove_tags body if body[/<html[> ]/]
- end.inspect
- }"
- response
- when /\A20/
- response
- else
- logger.warn "code #{response.code} at #{request.method} #{request.uri}#{
- " and so #{url}" if request.uri.to_s != url
- } from #{
- [__FILE__, caller.map{ |i| i[/(?<=:)\d+/] }].join ?:
- }"
- logger.debug "< body: #{
- response.body.tap do |body|
- body.replace remove_tags body if body[/<html[> ]/]
- end.inspect
- }"
- response
+ end
+ do_request[prepare_request[uri, mtd, form]].tap do |response|
+ cookies.each{ |k, v| response.add_field "Set-Cookie", "#{k}=#{v};" }
+ logger.debug "< header: #{response.to_hash}"
+ end.body
+
  end
  end
- do_request[prepare_request[uri, mtd, form]].tap do |response|
- cookies.each{ |k, v| response.add_field "Set-Cookie", "#{k}=#{v};" }
- logger.debug "< header: #{response.to_hash}"
- end
  end

- def request_data
-
+ def request_data http, mtd = :GET, type = :form, form: {}, header: {}, auth: nil, timeout: 30,
+ max_start_http_retry_delay: 3600,
+ max_read_retry_delay: 3600,
+ patch_request: nil, &block
+ http = start_http http, timeout, max_start_http_retry_delay unless http.is_a? Net::HTTP
+ path = http.instance_variable_get(:@uri).path
+ head = http.head path
  raise Error.new(
- (
-
- ) unless
-
-
+ (head.to_hash["content-type"] == ["image/png"] ? head.to_hash["content-type"] : head.body),
+ head.code.to_i
+ ) unless head.code[/\A(20\d|3\d\d)\z/]
+ body = http.read mtd, type, form: form, header: header, auth: auth, timeout: timeout,
+ max_read_retry_delay: max_read_retry_delay,
+ patch_request: patch_request, &block
+ if head.to_hash["content-encoding"] == "gzip"
+ Zlib::GzipReader.new(StringIO.new(body)).read
  else
-
+ body
  end.tap do |string|
- string.instance_variable_set :@uri_path,
- string.instance_variable_set :@header,
+ string.instance_variable_set :@uri_path, path
+ string.instance_variable_set :@header, head.to_hash
  end
  # ensure
  # response.instance_variable_get("@nethttputils_close").call if response

@@ -282,8 +300,27 @@ if $0 == __FILE__
  print "self testing... "
  require "pp"

+
  require "webrick"
  require "json"
+ Thread.abort_on_exception = true
+
+ server = WEBrick::HTTPServer.new Port: 8000
+ stack = []
+ server.mount_proc ?/ do |req, res|
+ stack.push req.request_method
+ end
+ Thread.new{ server.start }
+ NetHTTPUtils.start_http("http://localhost:8000/")
+ fail unless stack == %w{ }
+ stack.clear
+ NetHTTPUtils.start_http("http://localhost:8000/").head("/")
+ fail unless stack == %w{ HEAD }
+ stack.clear
+ NetHTTPUtils.request_data("http://localhost:8000/")
+ fail unless stack == %w{ HEAD GET }
+ server.shutdown
+
  server = WEBrick::HTTPServer.new Port: 8000
  server.mount_proc ?/ do |req, res|
  # pp req.dup.tap{ |_| _.instance_variable_set "@config", nil }

@@ -299,6 +336,7 @@ if $0 == __FILE__
  fail unless JSON.dump(["/", %w{ accept-encoding accept user-agent host content-type connection content-length }]) == NetHTTPUtils.request_data("http://localhost:8000/", :post, form: {1=>2})
  server.shutdown

+
  fail unless NetHTTPUtils.request_data("http://httpstat.us/200") == "200 OK"
  [400, 404, 500, 502, 503].each do |code|
  begin

@@ -307,11 +345,11 @@ if $0 == __FILE__
  raise unless e.code == code
  end
  end
- fail unless NetHTTPUtils.
- fail unless NetHTTPUtils.
- fail unless NetHTTPUtils.
- fail unless NetHTTPUtils.
- fail unless NetHTTPUtils.
+ fail unless NetHTTPUtils.start_http("http://httpstat.us/400").read == "400 Bad Request"
+ fail unless NetHTTPUtils.start_http("http://httpstat.us/404").read == "404 Not Found"
+ fail unless NetHTTPUtils.start_http("http://httpstat.us/500").read == "500 Internal Server Error"
+ fail unless NetHTTPUtils.start_http("http://httpstat.us/502").read == "502 Bad Gateway"
+ fail unless NetHTTPUtils.start_http("http://httpstat.us/503").read == "503 Service Unavailable"
  NetHTTPUtils.logger.level = Logger::FATAL
  [
  ["https://imgur.com/a/cccccc"],

@@ -331,7 +369,7 @@ if $0 == __FILE__
  http://www.cutehalloweencostumeideas.org/wp-content/uploads/2017/10/Niagara-Falls_04.jpg
  }.each do |url|
  begin
- fail NetHTTPUtils.request_data url,
+ fail NetHTTPUtils.request_data url, max_start_http_retry_delay: -1
  rescue SocketError => e
  raise unless e.message["getaddrinfo: "]
  end

@@ -347,7 +385,7 @@ if $0 == __FILE__
  end

  begin
- fail NetHTTPUtils.request_data "https://oi64.tinypic.com/29z7oxs.jpg?", timeout: 5,
+ fail NetHTTPUtils.request_data "https://oi64.tinypic.com/29z7oxs.jpg?", timeout: 5, max_start_http_retry_delay: -1
  rescue Net::OpenTimeout => e
  end
  ## this stopped failing on High Sierra
data/nethttputils.gemspec CHANGED

@@ -1,6 +1,6 @@
  Gem::Specification.new do |spec|
  spec.name = "nethttputils"
- spec.version = "0.
+ spec.version = "0.3.0.0"
  spec.summary = "this tool is like a pet that I adopted young and now I depend on, sorry"
  spec.description = <<-EOF
  Back in 2015 I was a guy automating things at my job and two scripts had a common need --
metadata CHANGED

@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: nethttputils
  version: !ruby/object:Gem::Version
- version: 0.
+ version: 0.3.0.0
  platform: ruby
  authors:
  - Victor Maslov aka Nakilon
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-
+ date: 2018-12-30 00:00:00.000000000 Z
  dependencies: []
  description: |2
  Back in 2015 I was a guy automating things at my job and two scripts had a common need --