logstash-filter-ezproxy 0.2.0 → 0.3.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (59)
  1. checksums.yaml +4 -4
  2. data/CONTRIBUTORS +1 -1
  3. data/lib/logstash/filters/ezproxy.rb +48 -65
  4. data/lib/logstash/filters/ezproxy_providers.rb +1 -0
  5. data/lib/logstash/filters/{cambridge.rb → ezproxy_providers/cambridge.rb} +5 -3
  6. data/lib/logstash/filters/{dawsonera.rb → ezproxy_providers/dawsonera.rb} +3 -2
  7. data/lib/logstash/filters/{proquest.rb → ezproxy_providers/ebookcentral.rb} +4 -2
  8. data/lib/logstash/filters/{ebscohost.rb → ezproxy_providers/ebscohost.rb} +3 -2
  9. data/lib/logstash/filters/ezproxy_providers/elsevier.rb +18 -0
  10. data/lib/logstash/filters/{emerald.rb → ezproxy_providers/emerald.rb} +4 -2
  11. data/lib/logstash/filters/{ft.rb → ezproxy_providers/ft.rb} +3 -3
  12. data/lib/logstash/filters/{gale.rb → ezproxy_providers/gale.rb} +4 -2
  13. data/lib/logstash/filters/{heinonline.rb → ezproxy_providers/heinonline.rb} +3 -2
  14. data/lib/logstash/filters/{ieee.rb → ezproxy_providers/ieee.rb} +3 -4
  15. data/lib/logstash/filters/{jstor.rb → ezproxy_providers/jstor.rb} +3 -2
  16. data/lib/logstash/filters/{lexis_webanalytics.rb → ezproxy_providers/lexis_webanalytics.rb} +3 -2
  17. data/lib/logstash/filters/{lexisnexis.rb → ezproxy_providers/lexisnexis.rb} +3 -3
  18. data/lib/logstash/filters/{myilibrary.rb → ezproxy_providers/myilibrary.rb} +7 -3
  19. data/lib/logstash/filters/{oxford.rb → ezproxy_providers/oxford.rb} +5 -3
  20. data/lib/logstash/filters/ezproxy_providers/proquest-search.rb +27 -0
  21. data/lib/logstash/filters/{sage.rb → ezproxy_providers/sage.rb} +3 -3
  22. data/lib/logstash/filters/{sciencedirect.rb → ezproxy_providers/sciencedirect.rb} +3 -4
  23. data/lib/logstash/filters/{scopus.rb → ezproxy_providers/scopus.rb} +3 -2
  24. data/lib/logstash/filters/{springer.rb → ezproxy_providers/springer.rb} +3 -3
  25. data/lib/logstash/filters/{tandf.rb → ezproxy_providers/tandf.rb} +3 -5
  26. data/lib/logstash/filters/{webofknowledge.rb → ezproxy_providers/webofknowledge.rb} +4 -2
  27. data/lib/logstash/filters/ezproxy_providers/westlaw.rb +18 -0
  28. data/lib/logstash/filters/{wiley.rb → ezproxy_providers/wiley.rb} +3 -3
  29. data/lib/logstash/helpers/param_helper.rb +0 -8
  30. data/logstash-filter-ezproxy.gemspec +2 -2
  31. data/spec/filters/cambridge/cambridge_spec.rb +3 -3
  32. data/spec/filters/dawsonera/dawsonera_spec.rb +3 -3
  33. data/spec/filters/{proquest/proquest.2018-02-09.csv → ebookcentral/ebookcentral.2018-02-09.csv} +0 -0
  34. data/spec/filters/{proquest/proquest_spec.rb → ebookcentral/ebookcentral.rb} +4 -4
  35. data/spec/filters/ebscohost/ebscohost_spec.rb +3 -3
  36. data/spec/filters/elsevier/elsevier.2018-02-20.csv +3 -0
  37. data/spec/filters/elsevier/elsevier_spec.rb +18 -0
  38. data/spec/filters/emerald/emerald_spec.rb +3 -3
  39. data/spec/filters/ft/ft_spec.rb +3 -3
  40. data/spec/filters/gale/gale_spec.rb +3 -3
  41. data/spec/filters/heinonline/heinonline_spec.rb +3 -3
  42. data/spec/filters/ieee/ieee_spec.rb +3 -3
  43. data/spec/filters/jstor/jstor_spec.rb +3 -3
  44. data/spec/filters/lexis_webanalytics/lexis_webanalytics_spec.rb +3 -3
  45. data/spec/filters/lexisnexis/lexisnexis_spec.rb +3 -3
  46. data/spec/filters/myilibrary/myilibrary_spec.rb +3 -3
  47. data/spec/filters/oxford/oxford_spec.rb +3 -3
  48. data/spec/filters/proquest-search/proquest-search.2018-02-19.csv +4 -0
  49. data/spec/filters/proquest-search/proquest-search_spec.rb +19 -0
  50. data/spec/filters/sage/sage_spec.rb +3 -3
  51. data/spec/filters/sciencedirect/sciencedirect_spec.rb +3 -3
  52. data/spec/filters/scopus/scopus_spec.rb +3 -3
  53. data/spec/filters/springer/springer_spec.rb +3 -3
  54. data/spec/filters/tandf/tandf_spec.rb +3 -3
  55. data/spec/filters/webofknowledge/webofknowledge_spec.rb +3 -3
  56. data/spec/filters/westlaw/westlaw.2018-02-20.csv +3 -0
  57. data/spec/filters/westlaw/westlaw_spec.rb +18 -0
  58. data/spec/filters/wiley/wiley_spec.rb +4 -4
  59. metadata +44 -28
@@ -1,6 +1,7 @@
1
+ require_relative '../ezproxy_providers'
1
2
 
2
- module Scopus
3
- def Scopus.parse (path, params)
3
+ class EzproxyProviders::Scopus
4
+ def self.parse (path, params)
4
5
 
5
6
  data = {
6
7
  "provider" => "scopus"
@@ -1,8 +1,8 @@
1
1
  require 'uri'
2
- require 'cgi'
2
+ require_relative '../ezproxy_providers'
3
3
 
4
- module Springer
5
- def Springer.parse (path, params)
4
+ class EzproxyProviders::Springer
5
+ def self.parse (path, params)
6
6
 
7
7
  data = {
8
8
  "provider" => "springer"
@@ -1,9 +1,7 @@
1
+ require_relative '../ezproxy_providers'
1
2
 
2
- require 'uri'
3
- require 'cgi'
4
-
5
- module TandF
6
- def TandF.parse (path, params)
3
+ class EzproxyProviders::TandF
4
+ def self.parse (path, params)
7
5
 
8
6
  data = {
9
7
  "provider" => "tandf"
@@ -1,5 +1,7 @@
1
- module WebOfKnowledge
2
- def WebOfKnowledge.parse (path, params)
1
+ require_relative '../ezproxy_providers'
2
+
3
+ class EzproxyProviders::WebOfKnowledge
4
+ def self.parse (path, params)
3
5
 
4
6
  data = {
5
7
  "provider" => "dawsonera"
@@ -0,0 +1,18 @@
1
+ require_relative '../ezproxy_providers'
2
+
3
+ class EzproxyProviders::Westlaw
4
+ def self.parse (path, params)
5
+
6
+ data = {
7
+ "provider" => "westlaw"
8
+ }
9
+
10
+ if params.key?('docguid')
11
+ data['unit_id'] = params['docguid'][0]
12
+ data['title_id'] = params['docguid'][0]
13
+ end
14
+
15
+ return data
16
+
17
+ end
18
+ end
@@ -1,7 +1,7 @@
1
- require 'uri'
1
+ require_relative '../ezproxy_providers'
2
2
 
3
- module Wiley
4
- def Wiley.parse (path, params)
3
+ class EzproxyProviders::Wiley
4
+ def self.parse (path, params)
5
5
 
6
6
  data = {
7
7
  "provider" => "wiley"
@@ -19,14 +19,6 @@ module ParamHelper
19
19
  end
20
20
  end
21
21
 
22
- # params.each do |key, value|
23
- # params_hash[key] = value
24
- # end
25
-
26
- # unless params.empty?
27
- # data['params'] = JSON::generate(params)
28
- # end
29
-
30
22
  return data
31
23
  end
32
24
  end
@@ -1,10 +1,10 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-filter-ezproxy'
3
- s.version = '0.2.0'
3
+ s.version = '0.3.0'
4
4
  s.licenses = ['Apache-2.0']
5
5
  s.summary = 'Write a short summary, because Rubygems requires one.'
6
6
  s.authors = ['Dom Belcher']
7
- s.email = 'dominic.belcher@gmail.com'
7
+ s.email = 'd.belcher@lancaster.ac.uk'
8
8
  s.require_paths = ['lib']
9
9
 
10
10
  # Files
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/cambridge'
1
+ require 'logstash/filters/ezproxy_providers/cambridge'
2
2
  require 'csv'
3
3
  require 'logstash/helpers/url_parser'
4
4
 
5
- RSpec.describe Cambridge do
5
+ RSpec.describe EzproxyProviders::Cambridge do
6
6
  CSV.foreach('spec/filters/cambridge/cambridge.2013-10-28.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[10])
8
8
  uri = parsed_url['uri']
@@ -11,7 +11,7 @@ RSpec.describe Cambridge do
11
11
 
12
12
  name = "tests a URL " + row[10]
13
13
  it name do
14
- data = Cambridge.parse(path, params, uri)
14
+ data = EzproxyProviders::Cambridge.parse(path, params, uri)
15
15
  expect(data["online_identifier"]).to eq(row[0])
16
16
  expect(data["print_identifier"]).to eq(row[1])
17
17
  expect(data["volume"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/dawsonera'
1
+ require 'logstash/filters/ezproxy_providers/dawsonera'
2
2
  require 'csv'
3
3
  require 'logstash/helpers/url_parser'
4
4
 
5
- RSpec.describe DawsonEra do
5
+ RSpec.describe EzproxyProviders::DawsonEra do
6
6
  CSV.foreach('spec/filters/dawsonera/dawsonera.2014-09-03.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[4])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe DawsonEra do
10
10
 
11
11
  name = "tests a URL " + row[4]
12
12
  it name do
13
- data = DawsonEra.parse(path, params)
13
+ data = EzproxyProviders::DawsonEra.parse(path, params)
14
14
  expect(data["unit_id"]).to eq(row[0])
15
15
  expect(data["online_identifier"]).to eq(row[1])
16
16
  expect(data["rtype"]).to eq(row[2])
@@ -1,16 +1,16 @@
1
- require 'logstash/filters/proquest'
1
+ require 'logstash/filters/ezproxy_providers/ebookcentral'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe Proquest do
6
- CSV.foreach('spec/filters/proquest/proquest.2018-02-09.csv', { :col_sep => ';', headers: true }) do |row|
5
+ RSpec.describe EzproxyProviders::EBookCentral do
6
+ CSV.foreach('spec/filters/ebookcentral/ebookcentral.2018-02-09.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[6])
8
8
  path = parsed_url['path']
9
9
  params = parsed_url['params']
10
10
 
11
11
  name = "tests a URL " + row[6]
12
12
  it name do
13
- data = Proquest.parse(path, params)
13
+ data = EzproxyProviders::EBookCentral.parse(path, params)
14
14
  expect(data["unit_id"]).to eq(row[0])
15
15
  expect(data["title_id"]).to eq(row[1])
16
16
  expect(data["rtype"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/ebscohost'
1
+ require 'logstash/filters/ezproxy_providers/ebscohost'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe Ebscohost do
5
+ RSpec.describe EzproxyProviders::Ebscohost do
6
6
  CSV.foreach('spec/filters/ebscohost/ebscohost.2014-08-21.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[11])
8
8
  uri = parsed_url['uri']
@@ -11,7 +11,7 @@ RSpec.describe Ebscohost do
11
11
 
12
12
  name = "tests a URL " + row[11]
13
13
  it name do
14
- data = Ebscohost.parse(path, params, uri)
14
+ data = EzproxyProviders::Ebscohost.parse(path, params, uri)
15
15
  expect(data["unit_id"]).to eq(row[0])
16
16
  expect(data["rtype"]).to eq(row[1])
17
17
  expect(data["mime"]).to eq(row[2])
@@ -0,0 +1,3 @@
1
+ out-unitid;out-titleid;in-url
2
+ illustrated-textbooks-paediatrics;illustrated-textbooks-paediatrics;https://elsevierelibrary.co.uk:443/pdfreader/illustrated-textbooks-paediatrics
3
+ kumar-clarks-clinical-medicine80515;kumar-clarks-clinical-medicine80515;http://elsevierelibrary.co.uk:80/product/kumar-clarks-clinical-medicine80515
@@ -0,0 +1,18 @@
1
+ require 'logstash/filters/ezproxy_providers/elsevier'
2
+ require 'csv'
3
+ require 'logstash/helpers/url_parser'
4
+
5
+ RSpec.describe EzproxyProviders::Elsevier do
6
+ CSV.foreach('spec/filters/elsevier/elsevier.2018-02-20.csv', { :col_sep => ';', headers: true }) do |row|
7
+ parsed_url = URLParser::parse(row[2])
8
+ path = parsed_url['path']
9
+ params = parsed_url['params']
10
+
11
+ name = "tests a URL " + row[2]
12
+ it name do
13
+ data = EzproxyProviders::Elsevier.parse(path, params)
14
+ expect(data["unit_id"]).to eq(row[0])
15
+ expect(data["title_id"]).to eq(row[1])
16
+ end
17
+ end
18
+ end
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/emerald'
1
+ require 'logstash/filters/ezproxy_providers/emerald'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe Emerald do
5
+ RSpec.describe EzproxyProviders::Emerald do
6
6
  CSV.foreach('spec/filters/emerald/emerald.2015-08-11.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[6])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe Emerald do
10
10
 
11
11
  name = "tests a URL " + row[6]
12
12
  it name do
13
- data = Emerald.parse(path, params)
13
+ data = EzproxyProviders::Emerald.parse(path, params)
14
14
  expect(data["title_id"]).to eq(row[0])
15
15
  expect(data["doi"]).to eq(row[1])
16
16
  expect(data["unit_id"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/ft'
1
+ require 'logstash/filters/ezproxy_providers/ft'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe FT do
5
+ RSpec.describe EzproxyProviders::FT do
6
6
  CSV.foreach('spec/filters/ft/ft.2016-11-22.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[5])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe FT do
10
10
 
11
11
  name = "tests a URL " + row[5]
12
12
  it name do
13
- data = FT.parse(path, params)
13
+ data = EzproxyProviders::FT.parse(path, params)
14
14
  expect(data["title_id"]).to eq(row[0])
15
15
  expect(data["unit_id"]).to eq(row[1])
16
16
  expect(data["publication_date"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/gale'
1
+ require 'logstash/filters/ezproxy_providers/gale'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe Gale do
5
+ RSpec.describe EzproxyProviders::Gale do
6
6
  CSV.foreach('spec/filters/gale/gale.2016-05-11.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[4])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe Gale do
10
10
 
11
11
  name = "tests a URL " + row[4]
12
12
  it name do
13
- data = Gale::parse(path, params)
13
+ data = EzproxyProviders::Gale::parse(path, params)
14
14
  expect(data["title_id"]).to eq(row[0])
15
15
  expect(data["unit_id"]).to eq(row[1])
16
16
  expect(data["rtype"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/heinonline'
1
+ require 'logstash/filters/ezproxy_providers/heinonline'
2
2
  require 'csv'
3
3
  require 'logstash/helpers/url_parser'
4
4
 
5
- RSpec.describe HeinOnline do
5
+ RSpec.describe EzproxyProviders::HeinOnline do
6
6
  CSV.foreach('spec/filters/heinonline/heinonline.2015-05-18.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[4])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe HeinOnline do
10
10
 
11
11
  name = "tests a URL " + row[4]
12
12
  it name do
13
- data = HeinOnline.parse(path, params)
13
+ data = EzproxyProviders::HeinOnline.parse(path, params)
14
14
  expect(data["title_id"]).to eq(row[0])
15
15
  expect(data["unit_id"]).to eq(row[1])
16
16
  expect(data["rtype"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/ieee'
1
+ require 'logstash/filters/ezproxy_providers/ieee'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe IEEE do
5
+ RSpec.describe EzproxyProviders::IEEE do
6
6
  CSV.foreach('spec/filters/ieee/ieee.2015-04-15.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[4])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe IEEE do
10
10
 
11
11
  name = "tests a URL " + row[4]
12
12
  it name do
13
- data = IEEE.parse(path, params)
13
+ data = EzproxyProviders::IEEE.parse(path, params)
14
14
  expect(data["title_id"]).to eq(row[0])
15
15
  expect(data["unit_id"]).to eq(row[1])
16
16
  expect(data["rtype"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/jstor'
1
+ require 'logstash/filters/ezproxy_providers/jstor'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe Jstor do
5
+ RSpec.describe EzproxyProviders::Jstor do
6
6
  CSV.foreach('spec/filters/jstor/jstor.2013-10-03.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[9])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe Jstor do
10
10
 
11
11
  name = "tests a URL " + row[9]
12
12
  it name do
13
- data = Jstor.parse(path, params)
13
+ data = EzproxyProviders::Jstor.parse(path, params)
14
14
  expect(data["unit_id"]).to eq(row[0])
15
15
  expect(data["doi"]).to eq(row[1])
16
16
  expect(data["title_id"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/lexis_webanalytics'
1
+ require 'logstash/filters/ezproxy_providers/lexis_webanalytics'
2
2
  require 'csv'
3
3
  require 'logstash/helpers/url_parser'
4
4
 
5
- RSpec.describe LexisWebAnalytics do
5
+ RSpec.describe EzproxyProviders::LexisWebAnalytics do
6
6
  CSV.foreach('spec/filters/lexis_webanalytics/lexis360.2017-04-28.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[4])
8
8
  path = parsed_url['path']
@@ -11,7 +11,7 @@ RSpec.describe LexisWebAnalytics do
11
11
 
12
12
  name = "tests a URL " + row[4]
13
13
  it name do
14
- data = LexisWebAnalytics.parse(path, params, uri)
14
+ data = EzproxyProviders::LexisWebAnalytics.parse(path, params, uri)
15
15
  expect(data["title_id"]).to eq(row[0])
16
16
  expect(data["unit_id"]).to eq(row[1])
17
17
  expect(data["rtype"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/lexisnexis'
1
+ require 'logstash/filters/ezproxy_providers/lexisnexis'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe LexisNexis do
5
+ RSpec.describe EzproxyProviders::LexisNexis do
6
6
  CSV.foreach('spec/filters/lexisnexis/lexisnexis.2013-05-17.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[5])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe LexisNexis do
10
10
 
11
11
  name = "tests a URL " + row[5]
12
12
  it name do
13
- data = LexisNexis.parse(path, params)
13
+ data = EzproxyProviders::LexisNexis.parse(path, params)
14
14
  expect(data["unit_id"]).to eq(row[2])
15
15
  expect(data["title_id"]).to eq(row[1])
16
16
  expect(data["rtype"]).to eq(row[3])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/myilibrary'
1
+ require 'logstash/filters/ezproxy_providers/myilibrary'
2
2
  require 'logstash/helpers/url_parser'
3
3
  require 'csv'
4
4
 
5
- RSpec.describe MyILibrary do
5
+ RSpec.describe EzproxyProviders::MyILibrary do
6
6
  CSV.foreach('spec/filters/myilibrary/myilibrary.2018-02-09.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[4])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe MyILibrary do
10
10
 
11
11
  name = "tests a URL " + row[4]
12
12
  it name do
13
- data = MyILibrary.parse(path, params)
13
+ data = EzproxyProviders::MyILibrary.parse(path, params)
14
14
  expect(data["unit_id"]).to eq(row[0])
15
15
  expect(data["title_id"]).to eq(row[1])
16
16
  expect(data["rtype"]).to eq(row[2])
@@ -1,8 +1,8 @@
1
- require 'logstash/filters/oxford'
1
+ require 'logstash/filters/ezproxy_providers/oxford'
2
2
  require 'csv'
3
3
  require 'logstash/helpers/url_parser'
4
4
 
5
- RSpec.describe Oxford do
5
+ RSpec.describe EzproxyProviders::Oxford do
6
6
  CSV.foreach('spec/filters/oxford/oxford.2018-02-15.csv', { :col_sep => ';', headers: true }) do |row|
7
7
  parsed_url = URLParser::parse(row[5])
8
8
  path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe Oxford do
10
10
 
11
11
  name = "tests a URL " + row[5]
12
12
  it name do
13
- data = Oxford.parse(path, params)
13
+ data = EzproxyProviders::Oxford.parse(path, params)
14
14
  expect(data["title_id"]).to eq(row[0])
15
15
  expect(data["unit_id"]).to eq(row[1])
16
16
  expect(data["doi"]).to eq(row[2])
@@ -0,0 +1,4 @@
1
+ out-unitid;out-title-id;out-mime;in-url
2
+ 868915501;868915501;PDF;https://search.proquest.com:443/docview/868915501/fulltextPDF?accountid=11979
3
+ 899707879;899707879;;https://search.proquest.com:443/docview/899707879/1CB6445015894B84PQ/1?accountid=11979
4
+ 1762049797;1762049797;;https://search.proquest.com:443/docview.headertitle:imagecheck?t:ac=1762049797
@@ -0,0 +1,19 @@
1
+ require 'logstash/filters/ezproxy_providers/proquest-search'
2
+ require 'csv'
3
+ require 'logstash/helpers/url_parser'
4
+
5
+ RSpec.describe EzproxyProviders::ProquestSearch do
6
+ CSV.foreach('spec/filters/proquest-search/proquest-search.2018-02-19.csv', { :col_sep => ';', headers: true }) do |row|
7
+ parsed_url = URLParser::parse(row[3])
8
+ path = parsed_url['path']
9
+ params = parsed_url['params']
10
+
11
+ name = "tests a URL " + row[3]
12
+ it name do
13
+ data = EzproxyProviders::ProquestSearch.parse(path, params)
14
+ expect(data["unit_id"]).to eq(row[0])
15
+ expect(data["title_id"]).to eq(row[1])
16
+ expect(data["mime"]).to eq(row[2])
17
+ end
18
+ end
19
+ end