url_parser 0.3.1 → 0.3.2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 22e8debbc6c8c9b0a48ad0b3111f77c59a4300a3
- data.tar.gz: 13a411e8114a2903636fe92b170f035db95c1c23
+ metadata.gz: 09b0929a6a38afe26b698634a884e309bbcd84a0
+ data.tar.gz: 70ec9fba2693c46a884878d59468c3ba85b50975
  SHA512:
- metadata.gz: 47f2df87f79ebcf71d7c0c5e49b3983bdfe9277fbf8a413d9b96b1b7a2bdc387172aae7d942018081e0c25e7fde4444a38f0b04f9327c39e975685cce295e4e5
- data.tar.gz: 45877a16f11ff89e67cda914c267573da564d4e9f121fc891f7307799339b3301892371d8395a224cb1fa38901b91e6f836d90ca017d918a3979ae3bcb4fca3a
+ metadata.gz: b6e85bfec45e2045ff22622b2d18dac0d8e36c04e4867e0b3436ae00ed80cc9275a270344adc19c406cbb5d433f81488c63e6f44d8a0281d68c08cbd68092043
+ data.tar.gz: bf8f1eeade03e380ac45f92d77da18577ee2ae58e9c3a2df27ceb88faed4cf841bd34e9d853f28f9c54ce7ca64d5b3478e0883777fc79a09082a1e102f79fabe
@@ -1,3 +1,3 @@
  module UrlParser
- VERSION = "0.3.1"
+ VERSION = "0.3.2"
  end
data/lib/url_parser.rb CHANGED
@@ -139,32 +139,38 @@ module UrlParser
  end

  def www
- return parser.subdomain if parser.subdomain.empty?
+ return nil if parser.subdomain.empty?
  parts = slice_domain.split('.')
- parts.first =~ /www?\d*/ ? parts.shift : ""
+ parts.first =~ /www?\d*/ ? parts.shift : nil
  end

  def subdomain
- return parser.subdomain if parser.subdomain.empty?
+ return nil if parser.subdomain.empty?
  parts = slice_domain.split('.')
  parts.shift if parts.first =~ /www?\d*/
  parts.compact.join('.')
  end

  def subdomains
+ return nil if parser.subdomain.empty?
  [ www, subdomain ].compact.join('.')
  end

  def domain_name
- parser.domain
+ parser.domain.empty? ? nil : parser.domain
  end

  def domain
- parser.domain_with_public_suffix
+ if parser.domain_with_public_suffix.empty?
+ nil
+ else
+ parser.domain_with_public_suffix
+ end
  end

  def tld
- parser.public_suffix
+ tld = parser.public_suffix
+ tld.empty? ? nil : tld
  end

  def hostname
@@ -176,11 +182,12 @@ module UrlParser
  end

  def host
- [ hostname, port ].compact.join(':')
+ name = [ hostname, port ].compact.join(':')
+ name.empty? ? nil : name
  end

  def origin
- url.origin
+ url.origin == "null" ? nil : url.origin
  end

  def authority
@@ -193,8 +200,9 @@ module UrlParser

  def directory
  parts = path.split('/')
- parts.pop unless segment.empty?
- parts.unshift('') unless parts.first.empty?
+ return '/' if parts.empty?
+ parts.pop unless segment.to_s.empty?
+ parts.unshift('') unless parts.first.to_s.empty?
  parts.compact.join('/')
  end

@@ -203,19 +211,19 @@
  end

  def segment
- path =~ /\/\z/ ? '' : path.split('/').last
+ path =~ /\/\z/ ? nil : path.split('/').last
  end

  def filename
- return 'index.html' if segment.empty?
- return '' if suffix.empty?
+ return 'index.html' if segment.to_s.empty?
+ return '' if suffix.to_s.empty?
  segment
  end

  def suffix
  ext = File.extname(path)
  ext[0] = '' if ext[0] == '.'
- ext
+ ext.empty? ? nil : ext
  end

  def query
@@ -231,7 +239,10 @@ module UrlParser
  end

  def resource
- [ [ segment, query ].compact.join('?'), fragment ].compact.join('#')
+ name = [
+ [ segment, query ].compact.join('?'), fragment
+ ].compact.join('#')
+ name.empty? ? nil : name
  end

  def relative?
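
The net effect of the url_parser.rb changes above is that readers for components absent from a URL now return nil instead of an empty string, and directory guards against an empty path. A minimal sketch of the resulting behavior (the UrlParser.new call mirrors the specs below; the inline results are inferred from this diff rather than captured output):

require 'url_parser'

parser = UrlParser.new('https://github.com/')

parser.subdomain  #=> nil            (returned "" in 0.3.1)
parser.tld        #=> "com"
parser.segment    #=> nil            (path ends in a slash, so there is no final segment)
parser.filename   #=> "index.html"   (default whenever the segment is missing)
parser.directory  #=> "/"            (new guard for an empty path)

Composite readers such as host, subdomains, and resource now check for an empty result and report nil as well, rather than joining empty strings.
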
@@ -271,37 +271,75 @@ describe UrlParser do
  #
  context "uri components" do

- let(:link) do
- 'foo://username:password@ww2.foo.bar.example.com:123/hello/world/there.html?name=ferret#foo'
- end
-
  let(:parser) { UrlParser.new(link, clean: false) }

- it { expect(parser.scheme).to eq 'foo' }
- it { expect(parser.username).to eq 'username' }
- it { expect(parser.password).to eq 'password' }
- it { expect(parser.userinfo).to eq 'username:password' }
- it { expect(parser.www).to eq 'ww2' }
- it { expect(parser.subdomain).to eq 'foo.bar' }
- it { expect(parser.subdomains).to eq 'ww2.foo.bar' }
- it { expect(parser.domain_name).to eq 'example' }
- it { expect(parser.domain).to eq 'example.com' }
- it { expect(parser.tld).to eq 'com' }
- it { expect(parser.hostname).to eq 'ww2.foo.bar.example.com' }
- it { expect(parser.port).to eq 123 }
- it { expect(parser.host).to eq 'ww2.foo.bar.example.com:123' }
- it { expect(parser.origin).to eq 'foo://ww2.foo.bar.example.com:123' }
- it { expect(parser.authority).to eq 'username:password@ww2.foo.bar.example.com:123' }
- it { expect(parser.site).to eq 'foo://username:password@ww2.foo.bar.example.com:123' }
- it { expect(parser.directory).to eq '/hello/world' }
- it { expect(parser.path).to eq '/hello/world/there.html' }
- it { expect(parser.segment).to eq 'there.html' }
- it { expect(parser.filename).to eq 'there.html' }
- it { expect(parser.suffix).to eq 'html' }
- it { expect(parser.query).to eq 'name=ferret' }
- it { expect(parser.query_values['name']).to eq 'ferret' }
- it { expect(parser.fragment).to eq 'foo' }
- it { expect(parser.resource).to eq 'there.html?name=ferret#foo' }
+ context "when present" do
+
+ let(:link) do
+ 'foo://username:password@ww2.foo.bar.example.com:123/hello/world/there.html?name=ferret#foo'
+ end
+
+ it { expect(parser.scheme).to eq 'foo' }
+ it { expect(parser.username).to eq 'username' }
+ it { expect(parser.password).to eq 'password' }
+ it { expect(parser.userinfo).to eq 'username:password' }
+ it { expect(parser.www).to eq 'ww2' }
+ it { expect(parser.subdomain).to eq 'foo.bar' }
+ it { expect(parser.subdomains).to eq 'ww2.foo.bar' }
+ it { expect(parser.domain_name).to eq 'example' }
+ it { expect(parser.domain).to eq 'example.com' }
+ it { expect(parser.tld).to eq 'com' }
+ it { expect(parser.hostname).to eq 'ww2.foo.bar.example.com' }
+ it { expect(parser.port).to eq 123 }
+ it { expect(parser.host).to eq 'ww2.foo.bar.example.com:123' }
+ it { expect(parser.origin).to eq 'foo://ww2.foo.bar.example.com:123' }
+ it { expect(parser.authority).to eq 'username:password@ww2.foo.bar.example.com:123' }
+ it { expect(parser.site).to eq 'foo://username:password@ww2.foo.bar.example.com:123' }
+ it { expect(parser.directory).to eq '/hello/world' }
+ it { expect(parser.path).to eq '/hello/world/there.html' }
+ it { expect(parser.segment).to eq 'there.html' }
+ it { expect(parser.filename).to eq 'there.html' }
+ it { expect(parser.suffix).to eq 'html' }
+ it { expect(parser.query).to eq 'name=ferret' }
+ it { expect(parser.query_values['name']).to eq 'ferret' }
+ it { expect(parser.fragment).to eq 'foo' }
+ it { expect(parser.resource).to eq 'there.html?name=ferret#foo' }
+
+ end
+
+ context "when missing" do
+
+ let(:link) do
+ '/'
+ end
+
+ it { expect(parser.scheme).to be_nil }
+ it { expect(parser.username).to be_nil }
+ it { expect(parser.password).to be_nil }
+ it { expect(parser.userinfo).to be_nil }
+ it { expect(parser.www).to be_nil }
+ it { expect(parser.subdomain).to be_nil }
+ it { expect(parser.subdomains).to be_nil }
+ it { expect(parser.domain_name).to be_nil }
+ it { expect(parser.domain).to be_nil }
+ it { expect(parser.tld).to be_nil }
+ it { expect(parser.hostname).to be_nil }
+ it { expect(parser.port).to be_nil }
+ it { expect(parser.host).to be_nil }
+ it { expect(parser.origin).to be_nil }
+ it { expect(parser.authority).to be_nil }
+ it { expect(parser.site).to be_nil }
+ it { expect(parser.directory).to eq '/' }
+ it { expect(parser.path).to eq '/' }
+ it { expect(parser.segment).to be_nil }
+ it { expect(parser.filename).to eq 'index.html' }
+ it { expect(parser.suffix).to be_nil }
+ it { expect(parser.query).to be_nil }
+ it { expect(parser.query_values['name']).to be_nil }
+ it { expect(parser.fragment).to be_nil }
+ it { expect(parser.resource).to be_nil }
+
+ end

  end

@@ -343,9 +381,9 @@ describe UrlParser do
  expect(parser.subdomain).to eq 'foo.bar'
  end

- it "returns an empty string if there is no subdomain" do
+ it "returns nil if there is no subdomain" do
  url = UrlParser.new('https://github.com/')
- expect(url.subdomain).to eq ''
+ expect(url.subdomain).to be_nil
  end

  it "does not include www as part of the subdomain" do
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: url_parser
  version: !ruby/object:Gem::Version
- version: 0.3.1
+ version: 0.3.2
  platform: ruby
  authors:
  - Matt Solt
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-08-07 00:00:00.000000000 Z
+ date: 2014-08-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler