logstash-filter-ezproxy 0.2.0 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CONTRIBUTORS +1 -1
- data/lib/logstash/filters/ezproxy.rb +48 -65
- data/lib/logstash/filters/ezproxy_providers.rb +1 -0
- data/lib/logstash/filters/{cambridge.rb → ezproxy_providers/cambridge.rb} +5 -3
- data/lib/logstash/filters/{dawsonera.rb → ezproxy_providers/dawsonera.rb} +3 -2
- data/lib/logstash/filters/{proquest.rb → ezproxy_providers/ebookcentral.rb} +4 -2
- data/lib/logstash/filters/{ebscohost.rb → ezproxy_providers/ebscohost.rb} +3 -2
- data/lib/logstash/filters/ezproxy_providers/elsevier.rb +18 -0
- data/lib/logstash/filters/{emerald.rb → ezproxy_providers/emerald.rb} +4 -2
- data/lib/logstash/filters/{ft.rb → ezproxy_providers/ft.rb} +3 -3
- data/lib/logstash/filters/{gale.rb → ezproxy_providers/gale.rb} +4 -2
- data/lib/logstash/filters/{heinonline.rb → ezproxy_providers/heinonline.rb} +3 -2
- data/lib/logstash/filters/{ieee.rb → ezproxy_providers/ieee.rb} +3 -4
- data/lib/logstash/filters/{jstor.rb → ezproxy_providers/jstor.rb} +3 -2
- data/lib/logstash/filters/{lexis_webanalytics.rb → ezproxy_providers/lexis_webanalytics.rb} +3 -2
- data/lib/logstash/filters/{lexisnexis.rb → ezproxy_providers/lexisnexis.rb} +3 -3
- data/lib/logstash/filters/{myilibrary.rb → ezproxy_providers/myilibrary.rb} +7 -3
- data/lib/logstash/filters/{oxford.rb → ezproxy_providers/oxford.rb} +5 -3
- data/lib/logstash/filters/ezproxy_providers/proquest-search.rb +27 -0
- data/lib/logstash/filters/{sage.rb → ezproxy_providers/sage.rb} +3 -3
- data/lib/logstash/filters/{sciencedirect.rb → ezproxy_providers/sciencedirect.rb} +3 -4
- data/lib/logstash/filters/{scopus.rb → ezproxy_providers/scopus.rb} +3 -2
- data/lib/logstash/filters/{springer.rb → ezproxy_providers/springer.rb} +3 -3
- data/lib/logstash/filters/{tandf.rb → ezproxy_providers/tandf.rb} +3 -5
- data/lib/logstash/filters/{webofknowledge.rb → ezproxy_providers/webofknowledge.rb} +4 -2
- data/lib/logstash/filters/ezproxy_providers/westlaw.rb +18 -0
- data/lib/logstash/filters/{wiley.rb → ezproxy_providers/wiley.rb} +3 -3
- data/lib/logstash/helpers/param_helper.rb +0 -8
- data/logstash-filter-ezproxy.gemspec +2 -2
- data/spec/filters/cambridge/cambridge_spec.rb +3 -3
- data/spec/filters/dawsonera/dawsonera_spec.rb +3 -3
- data/spec/filters/{proquest/proquest.2018-02-09.csv → ebookcentral/ebookcentral.2018-02-09.csv} +0 -0
- data/spec/filters/{proquest/proquest_spec.rb → ebookcentral/ebookcentral.rb} +4 -4
- data/spec/filters/ebscohost/ebscohost_spec.rb +3 -3
- data/spec/filters/elsevier/elsevier.2018-02-20.csv +3 -0
- data/spec/filters/elsevier/elsevier_spec.rb +18 -0
- data/spec/filters/emerald/emerald_spec.rb +3 -3
- data/spec/filters/ft/ft_spec.rb +3 -3
- data/spec/filters/gale/gale_spec.rb +3 -3
- data/spec/filters/heinonline/heinonline_spec.rb +3 -3
- data/spec/filters/ieee/ieee_spec.rb +3 -3
- data/spec/filters/jstor/jstor_spec.rb +3 -3
- data/spec/filters/lexis_webanalytics/lexis_webanalytics_spec.rb +3 -3
- data/spec/filters/lexisnexis/lexisnexis_spec.rb +3 -3
- data/spec/filters/myilibrary/myilibrary_spec.rb +3 -3
- data/spec/filters/oxford/oxford_spec.rb +3 -3
- data/spec/filters/proquest-search/proquest-search.2018-02-19.csv +4 -0
- data/spec/filters/proquest-search/proquest-search_spec.rb +19 -0
- data/spec/filters/sage/sage_spec.rb +3 -3
- data/spec/filters/sciencedirect/sciencedirect_spec.rb +3 -3
- data/spec/filters/scopus/scopus_spec.rb +3 -3
- data/spec/filters/springer/springer_spec.rb +3 -3
- data/spec/filters/tandf/tandf_spec.rb +3 -3
- data/spec/filters/webofknowledge/webofknowledge_spec.rb +3 -3
- data/spec/filters/westlaw/westlaw.2018-02-20.csv +3 -0
- data/spec/filters/westlaw/westlaw_spec.rb +18 -0
- data/spec/filters/wiley/wiley_spec.rb +4 -4
- metadata +44 -28
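
Most of this release moves the provider classes from lib/logstash/filters/ into lib/logstash/filters/ezproxy_providers/ and namespaces them under EzproxyProviders, as the renames above show. Below is a minimal usage sketch of the new layout (not part of the diff), with Cambridge as the example; the URL is invented, while the require path, parse signature, and returned fields are taken from the spec changes further down.

require 'logstash/filters/ezproxy_providers/cambridge'
require 'logstash/helpers/url_parser'

url = 'https://www.cambridge.org/core/journals/example-journal/article/example'  # hypothetical request URL
parsed_url = URLParser::parse(url)
data = EzproxyProviders::Cambridge.parse(parsed_url['path'], parsed_url['params'], parsed_url['uri'])
# per cambridge_spec.rb below, data includes fields such as "online_identifier", "print_identifier" and "volume"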
data/lib/logstash/filters/ezproxy_providers/westlaw.rb ADDED

@@ -0,0 +1,18 @@
+require_relative '../ezproxy_providers'
+
+class EzproxyProviders::Westlaw
+  def self.parse (path, params)
+
+    data = {
+        "provider" => "westlaw"
+    }
+
+    if params.key?('docguid')
+      data['unit_id'] = params['docguid'][0]
+      data['title_id'] = params['docguid'][0]
+    end
+
+    return data
+
+  end
+end
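
As a rough usage sketch (not part of the diff): the new Westlaw provider keys everything on the docguid query parameter. The path and GUID below are invented, and params is assumed to be a hash of string keys to arrays of values, matching how it is used above and in the specs.

require 'logstash/filters/ezproxy_providers/westlaw'

params = { 'docguid' => ['I0ECB5A30'] }                                # hypothetical document GUID
data = EzproxyProviders::Westlaw.parse('/maf/app/document', params)   # hypothetical path; parse only reads params
data  # => { "provider" => "westlaw", "unit_id" => "I0ECB5A30", "title_id" => "I0ECB5A30" }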
data/logstash-filter-ezproxy.gemspec CHANGED

@@ -1,10 +1,10 @@
 Gem::Specification.new do |s|
   s.name          = 'logstash-filter-ezproxy'
-  s.version       = '0.2.0'
+  s.version       = '0.3.0'
   s.licenses      = ['Apache-2.0']
   s.summary       = 'Write a short summary, because Rubygems requires one.'
   s.authors       = ['Dom Belcher']
-  s.email         = '
+  s.email         = 'd.belcher@lancaster.ac.uk'
   s.require_paths = ['lib']
 
   # Files
data/spec/filters/cambridge/cambridge_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/cambridge'
+require 'logstash/filters/ezproxy_providers/cambridge'
 require 'csv'
 require 'logstash/helpers/url_parser'
 
-RSpec.describe Cambridge do
+RSpec.describe EzproxyProviders::Cambridge do
     CSV.foreach('spec/filters/cambridge/cambridge.2013-10-28.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[10])
         uri = parsed_url['uri']
@@ -11,7 +11,7 @@ RSpec.describe Cambridge do
 
         name = "tests a URL " + row[10]
         it name do
-            data = Cambridge.parse(path, params, uri)
+            data = EzproxyProviders::Cambridge.parse(path, params, uri)
             expect(data["online_identifier"]).to eq(row[0])
             expect(data["print_identifier"]).to eq(row[1])
             expect(data["volume"]).to eq(row[2])
data/spec/filters/dawsonera/dawsonera_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/dawsonera'
+require 'logstash/filters/ezproxy_providers/dawsonera'
 require 'csv'
 require 'logstash/helpers/url_parser'
 
-RSpec.describe DawsonEra do
+RSpec.describe EzproxyProviders::DawsonEra do
     CSV.foreach('spec/filters/dawsonera/dawsonera.2014-09-03.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[4])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe DawsonEra do
 
         name = "tests a URL " + row[4]
         it name do
-            data = DawsonEra.parse(path, params)
+            data = EzproxyProviders::DawsonEra.parse(path, params)
             expect(data["unit_id"]).to eq(row[0])
             expect(data["online_identifier"]).to eq(row[1])
             expect(data["rtype"]).to eq(row[2])
    
data/spec/filters/{proquest/proquest.2018-02-09.csv → ebookcentral/ebookcentral.2018-02-09.csv} RENAMED

File without changes
data/spec/filters/{proquest/proquest_spec.rb → ebookcentral/ebookcentral.rb} RENAMED

@@ -1,16 +1,16 @@
-require 'logstash/filters/proquest'
+require 'logstash/filters/ezproxy_providers/ebookcentral'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe 
-    CSV.foreach('spec/filters/proquest/proquest.2018-02-09.csv', { :col_sep => ';', headers: true }) do |row|
+RSpec.describe EzproxyProviders::EBookCentral do
+    CSV.foreach('spec/filters/ebookcentral/ebookcentral.2018-02-09.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[6])
         path = parsed_url['path']
         params = parsed_url['params']
 
         name = "tests a URL " + row[6]
         it name do
-            data = 
+            data = EzproxyProviders::EBookCentral.parse(path, params)
             expect(data["unit_id"]).to eq(row[0])
             expect(data["title_id"]).to eq(row[1])
             expect(data["rtype"]).to eq(row[2])
data/spec/filters/ebscohost/ebscohost_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/ebscohost'
+require 'logstash/filters/ezproxy_providers/ebscohost'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe Ebscohost do
+RSpec.describe EzproxyProviders::Ebscohost do
     CSV.foreach('spec/filters/ebscohost/ebscohost.2014-08-21.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[11])
         uri = parsed_url['uri']
@@ -11,7 +11,7 @@ RSpec.describe Ebscohost do
 
         name = "tests a URL " + row[11]
         it name do
-            data = Ebscohost.parse(path, params, uri)
+            data = EzproxyProviders::Ebscohost.parse(path, params, uri)
             expect(data["unit_id"]).to eq(row[0])
             expect(data["rtype"]).to eq(row[1])
             expect(data["mime"]).to eq(row[2])
data/spec/filters/elsevier/elsevier.2018-02-20.csv ADDED

@@ -0,0 +1,3 @@
+out-unitid;out-titleid;in-url
+illustrated-textbooks-paediatrics;illustrated-textbooks-paediatrics;https://elsevierelibrary.co.uk:443/pdfreader/illustrated-textbooks-paediatrics
+kumar-clarks-clinical-medicine80515;kumar-clarks-clinical-medicine80515;http://elsevierelibrary.co.uk:80/product/kumar-clarks-clinical-medicine80515
data/spec/filters/elsevier/elsevier_spec.rb ADDED

@@ -0,0 +1,18 @@
+require 'logstash/filters/ezproxy_providers/elsevier'
+require 'csv'
+require 'logstash/helpers/url_parser'
+
+RSpec.describe EzproxyProviders::Elsevier do
+    CSV.foreach('spec/filters/elsevier/elsevier.2018-02-20.csv', { :col_sep => ';', headers: true }) do |row|
+        parsed_url = URLParser::parse(row[2])
+        path = parsed_url['path']
+        params = parsed_url['params']
+
+        name = "tests a URL " + row[2]
+        it name do
+            data = EzproxyProviders::Elsevier.parse(path, params)
+            expect(data["unit_id"]).to eq(row[0])
+            expect(data["title_id"]).to eq(row[1])
+        end
+    end
+end
data/spec/filters/emerald/emerald_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/emerald'
+require 'logstash/filters/ezproxy_providers/emerald'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe Emerald do
+RSpec.describe EzproxyProviders::Emerald do
     CSV.foreach('spec/filters/emerald/emerald.2015-08-11.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[6])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe Emerald do
 
         name = "tests a URL " + row[6]
         it name do
-            data = Emerald.parse(path, params)
+            data = EzproxyProviders::Emerald.parse(path, params)
             expect(data["title_id"]).to eq(row[0])
             expect(data["doi"]).to eq(row[1])
             expect(data["unit_id"]).to eq(row[2])
    
data/spec/filters/ft/ft_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/ft'
+require 'logstash/filters/ezproxy_providers/ft'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe FT do
+RSpec.describe EzproxyProviders::FT do
     CSV.foreach('spec/filters/ft/ft.2016-11-22.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[5])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe FT do
 
         name = "tests a URL " + row[5]
         it name do
-            data = FT.parse(path, params)
+            data = EzproxyProviders::FT.parse(path, params)
             expect(data["title_id"]).to eq(row[0])
             expect(data["unit_id"]).to eq(row[1])
             expect(data["publication_date"]).to eq(row[2])
data/spec/filters/gale/gale_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/gale'
+require 'logstash/filters/ezproxy_providers/gale'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe Gale do
+RSpec.describe EzproxyProviders::Gale do
     CSV.foreach('spec/filters/gale/gale.2016-05-11.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[4])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe Gale do
 
         name = "tests a URL " + row[4]
         it name do
-            data = Gale::parse(path, params)
+            data = EzproxyProviders::Gale::parse(path, params)
             expect(data["title_id"]).to eq(row[0])
             expect(data["unit_id"]).to eq(row[1])
             expect(data["rtype"]).to eq(row[2])
data/spec/filters/heinonline/heinonline_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/heinonline'
+require 'logstash/filters/ezproxy_providers/heinonline'
 require 'csv'
 require 'logstash/helpers/url_parser'
 
-RSpec.describe HeinOnline do
+RSpec.describe EzproxyProviders::HeinOnline do
     CSV.foreach('spec/filters/heinonline/heinonline.2015-05-18.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[4])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe HeinOnline do
 
         name = "tests a URL " + row[4]
         it name do
-            data = HeinOnline.parse(path, params)
+            data = EzproxyProviders::HeinOnline.parse(path, params)
             expect(data["title_id"]).to eq(row[0])
             expect(data["unit_id"]).to eq(row[1])
             expect(data["rtype"]).to eq(row[2])
data/spec/filters/ieee/ieee_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/ieee'
+require 'logstash/filters/ezproxy_providers/ieee'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe IEEE do
+RSpec.describe EzproxyProviders::IEEE do
     CSV.foreach('spec/filters/ieee/ieee.2015-04-15.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[4])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe IEEE do
 
         name = "tests a URL " + row[4]
         it name do
-            data = IEEE.parse(path, params)
+            data = EzproxyProviders::IEEE.parse(path, params)
             expect(data["title_id"]).to eq(row[0])
             expect(data["unit_id"]).to eq(row[1])
             expect(data["rtype"]).to eq(row[2])
data/spec/filters/jstor/jstor_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/jstor'
+require 'logstash/filters/ezproxy_providers/jstor'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe Jstor do
+RSpec.describe EzproxyProviders::Jstor do
     CSV.foreach('spec/filters/jstor/jstor.2013-10-03.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[9])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe Jstor do
 
         name = "tests a URL " + row[9]
         it name do
-            data = Jstor.parse(path, params)
+            data = EzproxyProviders::Jstor.parse(path, params)
             expect(data["unit_id"]).to eq(row[0])
             expect(data["doi"]).to eq(row[1])
             expect(data["title_id"]).to eq(row[2])
data/spec/filters/lexis_webanalytics/lexis_webanalytics_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/lexis_webanalytics'
+require 'logstash/filters/ezproxy_providers/lexis_webanalytics'
 require 'csv'
 require 'logstash/helpers/url_parser'
 
-RSpec.describe LexisWebAnalytics do
+RSpec.describe EzproxyProviders::LexisWebAnalytics do
     CSV.foreach('spec/filters/lexis_webanalytics/lexis360.2017-04-28.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[4])
         path = parsed_url['path']
@@ -11,7 +11,7 @@ RSpec.describe LexisWebAnalytics do
 
         name = "tests a URL " + row[4]
         it name do
-            data = LexisWebAnalytics.parse(path, params, uri)
+            data = EzproxyProviders::LexisWebAnalytics.parse(path, params, uri)
             expect(data["title_id"]).to eq(row[0])
             expect(data["unit_id"]).to eq(row[1])
             expect(data["rtype"]).to eq(row[2])
data/spec/filters/lexisnexis/lexisnexis_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/lexisnexis'
+require 'logstash/filters/ezproxy_providers/lexisnexis'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe LexisNexis do
+RSpec.describe EzproxyProviders::LexisNexis do
     CSV.foreach('spec/filters/lexisnexis/lexisnexis.2013-05-17.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[5])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe LexisNexis do
 
         name = "tests a URL " + row[5]
         it name do
-            data = LexisNexis.parse(path, params)
+            data = EzproxyProviders::LexisNexis.parse(path, params)
             expect(data["unit_id"]).to eq(row[2])
             expect(data["title_id"]).to eq(row[1])
             expect(data["rtype"]).to eq(row[3])
data/spec/filters/myilibrary/myilibrary_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/myilibrary'
+require 'logstash/filters/ezproxy_providers/myilibrary'
 require 'logstash/helpers/url_parser'
 require 'csv'
 
-RSpec.describe MyILibrary do
+RSpec.describe EzproxyProviders::MyILibrary do
     CSV.foreach('spec/filters/myilibrary/myilibrary.2018-02-09.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[4])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe MyILibrary do
 
         name = "tests a URL " + row[4]
         it name do
-            data = MyILibrary.parse(path, params)
+            data = EzproxyProviders::MyILibrary.parse(path, params)
             expect(data["unit_id"]).to eq(row[0])
             expect(data["title_id"]).to eq(row[1])
             expect(data["rtype"]).to eq(row[2])
data/spec/filters/oxford/oxford_spec.rb CHANGED

@@ -1,8 +1,8 @@
-require 'logstash/filters/oxford'
+require 'logstash/filters/ezproxy_providers/oxford'
 require 'csv'
 require 'logstash/helpers/url_parser'
 
-RSpec.describe Oxford do
+RSpec.describe EzproxyProviders::Oxford do
     CSV.foreach('spec/filters/oxford/oxford.2018-02-15.csv', { :col_sep => ';', headers: true }) do |row|
         parsed_url = URLParser::parse(row[5])
         path = parsed_url['path']
@@ -10,7 +10,7 @@ RSpec.describe Oxford do
 
         name = "tests a URL " + row[5]
         it name do
-            data = Oxford.parse(path, params)
+            data = EzproxyProviders::Oxford.parse(path, params)
             expect(data["title_id"]).to eq(row[0])
             expect(data["unit_id"]).to eq(row[1])
             expect(data["doi"]).to eq(row[2])
data/spec/filters/proquest-search/proquest-search.2018-02-19.csv ADDED

@@ -0,0 +1,4 @@
+out-unitid;out-title-id;out-mime;in-url
+868915501;868915501;PDF;https://search.proquest.com:443/docview/868915501/fulltextPDF?accountid=11979
+899707879;899707879;;https://search.proquest.com:443/docview/899707879/1CB6445015894B84PQ/1?accountid=11979
+1762049797;1762049797;;https://search.proquest.com:443/docview.headertitle:imagecheck?t:ac=1762049797
data/spec/filters/proquest-search/proquest-search_spec.rb ADDED

@@ -0,0 +1,19 @@
+require 'logstash/filters/ezproxy_providers/proquest-search'
+require 'csv'
+require 'logstash/helpers/url_parser'
+
+RSpec.describe EzproxyProviders::ProquestSearch do
+    CSV.foreach('spec/filters/proquest-search/proquest-search.2018-02-19.csv', { :col_sep => ';', headers: true }) do |row|
+        parsed_url = URLParser::parse(row[3])
+        path = parsed_url['path']
+        params = parsed_url['params']
+
+        name = "tests a URL " + row[3]
+        it name do
+            data = EzproxyProviders::ProquestSearch.parse(path, params)
+            expect(data["unit_id"]).to eq(row[0])
+            expect(data["title_id"]).to eq(row[1])
+            expect(data["mime"]).to eq(row[2])
+        end
+    end
+end