geo_combine 0.1.0 → 0.5.0
- checksums.yaml +5 -5
- data/.travis.yml +6 -1
- data/Gemfile +2 -1
- data/README.md +108 -23
- data/geo_combine.gemspec +4 -2
- data/lib/geo_combine.rb +11 -1
- data/lib/geo_combine/bounding_box.rb +71 -0
- data/lib/geo_combine/ckan_metadata.rb +112 -0
- data/lib/geo_combine/esri_open_data.rb +0 -9
- data/lib/geo_combine/exceptions.rb +8 -0
- data/lib/geo_combine/formatting.rb +6 -1
- data/lib/geo_combine/geo_blacklight_harvester.rb +203 -0
- data/lib/geo_combine/geoblacklight.rb +80 -12
- data/lib/geo_combine/ogp.rb +229 -0
- data/lib/geo_combine/railtie.rb +7 -0
- data/lib/geo_combine/version.rb +1 -1
- data/lib/tasks/geo_combine.rake +54 -20
- data/lib/xslt/fgdc2geoBL.xsl +95 -154
- data/lib/xslt/fgdc2html.xsl +105 -157
- data/lib/xslt/iso2geoBL.xsl +62 -84
- data/lib/xslt/iso2html.xsl +1107 -1070
- data/spec/features/iso2html_spec.rb +7 -1
- data/spec/fixtures/docs/basic_geoblacklight.json +5 -7
- data/spec/fixtures/docs/ckan.json +456 -0
- data/spec/fixtures/docs/full_geoblacklight.json +2 -8
- data/spec/fixtures/docs/geoblacklight_pre_v1.json +37 -0
- data/spec/fixtures/docs/ogp_harvard_line.json +28 -0
- data/spec/fixtures/docs/ogp_harvard_raster.json +28 -0
- data/spec/fixtures/docs/ogp_tufts_vector.json +31 -0
- data/spec/fixtures/json_docs.rb +20 -0
- data/spec/lib/geo_combine/bounding_box_spec.rb +59 -0
- data/spec/lib/geo_combine/ckan_metadata_spec.rb +114 -0
- data/spec/lib/geo_combine/esri_open_data_spec.rb +1 -14
- data/spec/lib/geo_combine/fgdc_spec.rb +11 -14
- data/spec/lib/geo_combine/formatting_spec.rb +6 -0
- data/spec/lib/geo_combine/geo_blacklight_harvester_spec.rb +190 -0
- data/spec/lib/geo_combine/geoblacklight_spec.rb +137 -11
- data/spec/lib/geo_combine/iso19139_spec.rb +5 -2
- data/spec/lib/geo_combine/ogp_spec.rb +163 -0
- data/spec/spec_helper.rb +1 -0
- metadata +63 -14
- data/lib/schema/geoblacklight-schema.json +0 -169
data/spec/lib/geo_combine/formatting_spec.rb
@@ -19,4 +19,10 @@ RSpec.describe GeoCombine::Formatting do
       expect(subject.sanitize_and_remove_lines(dirty)).to_not match(/\n/)
     end
   end
+  describe '#sluggify' do
+    let(:preslug) { 'HARVARD...Co_0l' }
+    it 'handles multiple . and _ and uppercase' do
+      expect(subject.sluggify(preslug)).to eq 'harvard-co-0l'
+    end
+  end
 end
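The new `#sluggify` helper tested above collapses runs of `.` and `_` and downcases the input. A minimal usage sketch, assuming `GeoCombine::Formatting` is mixed into a plain object the way the spec's subject does:

```ruby
require 'geo_combine'

# Mix the formatting helpers into a throwaway object (hypothetical setup).
formatter = Object.new.extend(GeoCombine::Formatting)

formatter.sluggify('HARVARD...Co_0l') # => "harvard-co-0l", per the expectation above
```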
data/spec/lib/geo_combine/geo_blacklight_harvester_spec.rb
@@ -0,0 +1,190 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'rsolr'
+
+RSpec.describe GeoCombine::GeoBlacklightHarvester do
+  let(:site_key) { :INSTITUTION }
+  let(:stub_json_response) { '{}' }
+  let(:stub_solr_connection) { double('RSolr::Connection') }
+  subject(:harvester) { described_class.new(site_key) }
+
+  before do
+    allow(described_class).to receive(:config).and_return({
+      INSTITUTION: {
+        host: 'https://example.com/',
+        params: {
+          f: { dct_provenance_s: ['INSTITUTION'] }
+        }
+      }
+    })
+  end
+
+  describe 'initialization' do
+    context 'when an unconfigured site is sent in' do
+      let(:site_key) { 'unknown' }
+
+      it { expect { harvester }.to raise_error(ArgumentError) }
+    end
+  end
+
+  describe '#index' do
+    before do
+      expect(Net::HTTP).to receive(:get).with(
+        URI('https://example.com?f%5Bdct_provenance_s%5D%5B%5D=INSTITUTION&format=json&per_page=100&page=1')
+      ).and_return(stub_json_response)
+      allow(RSolr).to receive(:connect).and_return(stub_solr_connection)
+    end
+
+    let(:docs) { [{ layer_slug_s: 'abc-123' }, { layer_slug_s: 'abc-321' }] }
+    let(:stub_json_response) do
+      { response: { docs: docs, pages: { current_page: 1, total_pages: 1 } } }.to_json
+    end
+
+    it 'adds documents returned to solr' do
+      expect(stub_solr_connection).to receive(:update).with(hash_including(data: docs.to_json)).and_return(nil)
+      harvester.index
+    end
+
+    describe 'document tranformations' do
+      let(:docs) do
+        [
+          { layer_slug_s: 'abc-123', _version_: '1', timestamp: '1999-12-31', score: 0.1 },
+          { layer_slug_s: 'abc-321', dc_source_s: 'abc-123' }
+        ]
+      end
+
+      context 'when a tranformer is set' do
+        before do
+          expect(described_class).to receive(:document_transformer).at_least(:once).and_return(
+            ->(doc) {
+              doc.delete('_version_')
+              doc
+            }
+          )
+        end
+
+        it 'removes the _version_ field as requested' do
+          expect(stub_solr_connection).to receive(:update).with(
+            hash_including(
+              data: [
+                { layer_slug_s: 'abc-123', timestamp: '1999-12-31', score: 0.1 },
+                { layer_slug_s: 'abc-321', dc_source_s: 'abc-123' }
+              ].to_json
+            )
+          ).and_return(nil)
+
+          harvester.index
+        end
+      end
+
+      context 'when no transformer is set' do
+        it 'removes the _version_, timestamp, and score fields' do
+          expect(stub_solr_connection).to receive(:update).with(
+            hash_including(
+              data: [
+                { layer_slug_s: 'abc-123' },
+                { layer_slug_s: 'abc-321', dc_source_s: 'abc-123' }
+              ].to_json
+            )
+          ).and_return(nil)
+
+          harvester.index
+        end
+      end
+    end
+  end
+
+  describe 'BlacklightResponseVersionFactory' do
+    let(:version_class) { described_class::BlacklightResponseVersionFactory.call(json) }
+
+    context 'when a legacy blacklight version (6 and earlier)' do
+      let(:json) { { 'response' => {} } }
+
+      it { expect(version_class).to be described_class::LegacyBlacklightResponse }
+    end
+
+    context 'when a modern blacklight version (7 and later)' do
+      let(:json) { { 'links' => {}, 'data' => [] } }
+
+      it { expect(version_class).to be described_class::ModernBlacklightResponse }
+    end
+
+    context 'when a the JSON response is not recognizable' do
+      let(:json) { { error: 'Broken' } }
+
+      it { expect { version_class }.to raise_error(NotImplementedError) }
+    end
+  end
+
+  describe 'LegacyBlacklightResponse' do
+    before do
+      allow(RSolr).to receive(:connect).and_return(stub_solr_connection)
+    end
+
+    let(:first_docs) { [{ 'layer_slug_s' => 'abc-123' }, { 'layer_slug_s' => 'abc-321' }] }
+    let(:second_docs) { [{ 'layer_slug_s' => 'xyz-123' }, { 'layer_slug_s' => 'xyz-321' }] }
+    let(:stub_first_response) do
+      { 'response' => { 'docs' => first_docs, 'pages' => { 'current_page' => 1, 'total_pages' => 2 } } }
+    end
+    let(:stub_second_response) do
+      { 'response' => { 'docs' => second_docs, 'pages' => { 'current_page' => 2, 'total_pages' => 2 } } }
+    end
+
+    describe '#documents' do
+      it 'pages through the response and returns all the documents' do
+        expect(Net::HTTP).to receive(:get).with(
+          URI('https://example.com?f%5Bdct_provenance_s%5D%5B%5D=INSTITUTION&format=json&per_page=100&page=2')
+        ).and_return(stub_second_response.to_json)
+        base_url = 'https://example.com?f%5Bdct_provenance_s%5D%5B%5D=INSTITUTION&format=json&per_page=100'
+        docs = described_class::LegacyBlacklightResponse.new(response: stub_first_response, base_url: base_url).documents
+
+        expect(docs.to_a).to eq([first_docs, second_docs])
+      end
+    end
+  end
+
+  describe 'ModernBlacklightResponse' do
+    before do
+      allow(RSolr).to receive(:connect).and_return(stub_solr_connection)
+      expect(Net::HTTP).to receive(:get).with(
+        URI('https://example.com?f%5Bdct_provenance_s%5D%5B%5D=INSTITUTION&format=json&per_page=100&page=2')
+      ).and_return(second_results_response.to_json)
+    end
+
+    let(:first_results_response) do
+      { 'data' => [
+        { 'links' => { 'self' => 'https://example.com/catalog/abc-123' } },
+        { 'links' => { 'self' => 'https://example.com/catalog/abc-321' } }
+      ],
+        'links' => { 'next' => 'https://example.com?f%5Bdct_provenance_s%5D%5B%5D=INSTITUTION&format=json&per_page=100&page=2' }
+      }
+    end
+
+    let(:second_results_response) do
+      { 'data' => [
+        { 'links' => { 'self' => 'https://example.com/catalog/xyz-123' } },
+        { 'links' => { 'self' => 'https://example.com/catalog/xyz-321' } }
+      ]
+      }
+    end
+
+    describe '#documents' do
+      it 'pages through the response and fetches documents for each "link" on the response data' do
+        %w[abc-123 abc-321 xyz-123 xyz-321].each do |id|
+          expect(Net::HTTP).to receive(:get).with(
+            URI("https://example.com/catalog/#{id}/raw")
+          ).and_return({ 'layer_slug_s' => id }.to_json)
+        end
+
+        base_url = 'https://example.com?f%5Bdct_provenance_s%5D%5B%5D=INSTITUTION&format=json&per_page=100'
+        docs = described_class::ModernBlacklightResponse.new(response: first_results_response, base_url: base_url).documents
+
+        expect(docs.to_a).to eq([
+                                  [{ 'layer_slug_s' => 'abc-123' }, { 'layer_slug_s' => 'abc-321' }],
+                                  [{ 'layer_slug_s' => 'xyz-123' }, { 'layer_slug_s' => 'xyz-321' }],
+                                ])
+      end
+    end
+  end
+end
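The new `GeoCombine::GeoBlacklightHarvester` spec above implies the intended workflow: a `config` class method returns per-site settings, `new(site_key)` builds a harvester, and `index` pages through the source GeoBlacklight JSON API and pushes documents to Solr via RSolr, optionally filtering each document through a `document_transformer` lambda. A rough sketch under those assumptions; the spec only stubs the `config` reader, so the override shown here is purely illustrative:

```ruby
require 'geo_combine'
require 'geo_combine/geo_blacklight_harvester'

# Illustrative only: make `.config` return the per-site settings the spec stubs
# (source GeoBlacklight host plus any facet params used to scope the harvest).
GeoCombine::GeoBlacklightHarvester.define_singleton_method(:config) do
  {
    INSTITUTION: {
      host: 'https://example.com/',
      params: { f: { dct_provenance_s: ['INSTITUTION'] } }
    }
  }
end

# Harvest one site's documents into the local Solr index. Without a custom
# document_transformer the spec shows _version_, timestamp, and score being dropped.
GeoCombine::GeoBlacklightHarvester.new(:INSTITUTION).index
```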
data/spec/lib/geo_combine/geoblacklight_spec.rb
@@ -3,16 +3,18 @@ require 'spec_helper'
 RSpec.describe GeoCombine::Geoblacklight do
   include XmlDocs
   include JsonDocs
-
-  let(:
+  include GeoCombine::Exceptions
+  let(:full_geobl) { described_class.new(full_geoblacklight) }
+  let(:basic_geobl) { described_class.new(basic_geoblacklight) }
+  let(:pre_v1_geobl) { described_class.new(geoblacklight_pre_v1) }
   describe '#initialize' do
     it 'parses metadata argument JSON to Hash' do
       expect(basic_geobl.instance_variable_get(:@metadata)).to be_an Hash
     end
     describe 'merges fields argument into metadata' do
-      let(:basic_geobl) {
+      let(:basic_geobl) { described_class.new(basic_geoblacklight, 'dc_identifier_s' => 'new one', "extra_field" => true)}
       it 'overwrites existing metadata fields' do
-        expect(basic_geobl.metadata['
+        expect(basic_geobl.metadata['dc_identifier_s']).to eq 'new one'
       end
       it 'adds in new fields' do
         expect(basic_geobl.metadata['extra_field']).to be true
@@ -22,7 +24,7 @@ RSpec.describe GeoCombine::Geoblacklight do
   describe '#metadata' do
     it 'reads the metadata instance variable' do
       expect(basic_geobl.metadata).to be_an Hash
-      expect(basic_geobl.metadata).to have_key '
+      expect(basic_geobl.metadata).to have_key 'dc_identifier_s'
     end
   end
   describe '#to_json' do
@@ -31,13 +33,9 @@ RSpec.describe GeoCombine::Geoblacklight do
       expect(valid_json?(basic_geobl.to_json)).to be_truthy
     end
   end
+  let(:enhanced_geobl) { described_class.new(basic_geoblacklight, 'layer_geom_type_s' => 'esriGeometryPolygon') }
+  before { enhanced_geobl.enhance_metadata }
   describe '#enhance_metadata' do
-    let(:enhanced_geobl) { GeoCombine::Geoblacklight.new(basic_geoblacklight, 'dct_references_s' => '', 'layer_geom_type_s' => 'esriGeometryPolygon') }
-    before { enhanced_geobl.enhance_metadata }
-    it 'calls enhancement methods to validate document' do
-      expect { basic_geobl.valid? }.to raise_error JSON::Schema::ValidationError
-      expect(enhanced_geobl.valid?).to be true
-    end
     it 'enhances the dc_subject_sm field' do
       expect(enhanced_geobl.metadata['dc_subject_sm']).to include 'Boundaries', 'Inland Waters'
     end
@@ -52,8 +50,136 @@ RSpec.describe GeoCombine::Geoblacklight do
     it 'a valid geoblacklight-schema document should be valid' do
       expect(full_geobl.valid?).to be true
     end
+    context 'must have required fields' do
+      %w(
+        dc_title_s
+        dc_identifier_s
+        dc_rights_s
+        dct_provenance_s
+        layer_slug_s
+        solr_geom
+      ).each do |field|
+        it field do
+          full_geobl.metadata.delete field
+          expect { full_geobl.valid? }.to raise_error(JSON::Schema::ValidationError, /#{field}/)
+        end
+      end
+    end
+    context 'need not have optional fields' do
+      %w(
+        dc_description_s
+        dc_format_s
+        dc_language_s
+        dc_publisher_s
+        dc_source_sm
+        dc_subject_sm
+        dct_isPartOf_sm
+        dct_issued_dt
+        dct_references_s
+        dct_spatial_sm
+        dct_temporal_sm
+        geoblacklight_version
+        layer_geom_type_s
+        layer_id_s
+        layer_modified_dt
+        solr_year_i
+      ).each do |field|
+        it field do
+          full_geobl.metadata.delete field
+          expect { full_geobl.valid? }.not_to raise_error
+        end
+      end
+    end
+    context 'need not have deprecated fields' do
+      %w(
+        dc_relation_sm
+        dc_type_s
+        georss_box_s
+        georss_point_s
+        uuid
+      ).each do |field|
+        it field do
+          full_geobl.metadata.delete field
+          expect { full_geobl.valid? }.not_to raise_error
+        end
+      end
+    end
     it 'an invalid document' do
       expect { basic_geobl.valid? }.to raise_error JSON::Schema::ValidationError
     end
+    it 'calls the dct_references_s validator' do
+      expect(enhanced_geobl).to receive(:dct_references_validate!)
+      enhanced_geobl.valid?
+    end
+    it 'validates spatial bounding box' do
+      expect(JSON::Validator).to receive(:validate!).and_return true
+      expect { basic_geobl.valid? }
+        .to raise_error GeoCombine::Exceptions::InvalidGeometry
+    end
+  end
+  describe '#dct_references_validate!' do
+    context 'with valid document' do
+      it 'is valid' do
+        expect(full_geobl.dct_references_validate!).to be true
+      end
+    end
+    context 'with invalid document' do
+      let(:unparseable_json) do
+        <<-JSON
+          {
+            \"http://schema.org/url\":\"http://example.com/abc123\",,
+            \"http://schema.org/downloadUrl\":\"http://example.com/abc123/data.zip\"
+          }
+        JSON
+      end
+      let(:bad_ref) do
+        GeoCombine::Geoblacklight.new(
+          basic_geoblacklight, 'dct_references_s' => unparseable_json, 'layer_geom_type_s' => 'esriGeometryPolygon'
+        )
+      end
+      let(:not_hash) do
+        GeoCombine::Geoblacklight.new(
+          basic_geoblacklight, 'dct_references_s' => '[{}]', 'layer_geom_type_s' => 'esriGeometryPolygon'
+        )
+      end
+      before do
+        bad_ref.enhance_metadata
+        not_hash.enhance_metadata
+      end
+      it 'unparseable json' do
+        expect { bad_ref.dct_references_validate! }.to raise_error JSON::ParserError
+      end
+      it 'not a hash' do
+        expect { not_hash.dct_references_validate! }.to raise_error GeoCombine::Exceptions::InvalidDCTReferences
+      end
+    end
+  end
+  describe 'spatial_validate!' do
+    context 'when valid' do
+      it { full_geobl.spatial_validate! }
+    end
+    context 'when invalid' do
+      it { expect { basic_geobl.spatial_validate! }.to raise_error GeoCombine::Exceptions::InvalidGeometry }
+    end
+  end
+  describe 'upgrade_to_v1' do
+    before do
+      expect(pre_v1_geobl).to receive(:upgrade_to_v1).and_call_original
+      pre_v1_geobl.enhance_metadata
+    end
+
+    it 'tags with version' do
+      expect(pre_v1_geobl.metadata).to include('geoblacklight_version' => '1.0')
+    end
+
+    it 'properly deprecates fields' do
+      described_class::DEPRECATED_KEYS_V1.each do |k|
+        expect(pre_v1_geobl.metadata.keys).not_to include(k.to_s)
+      end
+    end
+
+    it 'normalizes slugs' do
+      expect(pre_v1_geobl.metadata).to include('layer_slug_s' => 'sde-columbia-esri-arcatlas-snow-ln')
+    end
   end
 end
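Taken together, the geoblacklight_spec changes describe an enhance-then-validate workflow: `enhance_metadata` now also upgrades pre-v1 documents (tagging `geoblacklight_version`, dropping deprecated keys, normalizing slugs), while `valid?` additionally exercises `dct_references_validate!` and a spatial bounds check. A condensed sketch of that flow; the input path is hypothetical:

```ruby
require 'geo_combine'

# Wrap a GeoBlacklight JSON document, merging in any fields it lacks.
record = GeoCombine::Geoblacklight.new(
  File.read('doc.json'), # hypothetical path to a GeoBlacklight JSON document
  'layer_geom_type_s' => 'esriGeometryPolygon'
)

record.enhance_metadata          # upgrades pre-v1 docs, enhances dc_subject_sm, etc.
record.valid?                    # raises JSON::Schema::ValidationError when required fields are missing
record.dct_references_validate!  # raises on unparseable or non-Hash dct_references_s
record.spatial_validate!         # raises GeoCombine::Exceptions::InvalidGeometry on bad bounds
```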
data/spec/lib/geo_combine/iso19139_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe GeoCombine::Iso19139 do
     end
   end
   describe '#to_geoblacklight' do
-    let(:valid_geoblacklight) { iso_object.to_geoblacklight('layer_geom_type_s' => 'Polygon'
+    let(:valid_geoblacklight) { iso_object.to_geoblacklight('layer_geom_type_s' => 'Polygon') }
     it 'should create a GeoCombine::Geoblacklight object' do
       expect(valid_geoblacklight).to be_an GeoCombine::Geoblacklight
     end
@@ -27,6 +27,9 @@ RSpec.describe GeoCombine::Iso19139 do
       valid_geoblacklight.enhance_metadata
       expect(valid_geoblacklight.valid?).to be_truthy
     end
+    it 'should have geoblacklight_version' do
+      expect(valid_geoblacklight.metadata['geoblacklight_version']).to eq '1.0'
+    end
     it 'should have dc_creator_sm' do
       expect(valid_geoblacklight.metadata["dc_creator_sm"]).to be_an Array
       expect(valid_geoblacklight.metadata["dc_creator_sm"]).to eq ["Circuit Rider Productions"]
@@ -34,7 +37,7 @@ RSpec.describe GeoCombine::Iso19139 do
     it 'should have dc_publisher_sm' do
       expect(valid_geoblacklight.metadata["dc_publisher_sm"]).to be_an Array
       expect(valid_geoblacklight.metadata["dc_publisher_sm"]).to eq ["Circuit Rider Productions"]
-    end
+    end
   end
   describe '#to_html' do
     it 'should create a transformation of the metadata as a String' do
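The Iso19139 spec changes are small: `to_geoblacklight` accepts a hash of supplemental fields, and the enhanced record now reports `geoblacklight_version` '1.0'. A brief sketch; the XML path is hypothetical, since the spec builds its object from fixture XML:

```ruby
require 'geo_combine'

iso = GeoCombine::Iso19139.new('metadata/iso19139.xml') # hypothetical path to an ISO 19139 record
record = iso.to_geoblacklight('layer_geom_type_s' => 'Polygon')
record.enhance_metadata
record.metadata['geoblacklight_version'] # => "1.0", per the new expectation above
```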
data/spec/lib/geo_combine/ogp_spec.rb
@@ -0,0 +1,163 @@
+require 'spec_helper'
+
+RSpec.describe GeoCombine::OGP do
+  include JsonDocs
+
+  subject(:ogp) { GeoCombine::OGP.new(ogp_harvard_raster) }
+  let(:ogp_tufts) { GeoCombine::OGP.new(ogp_tufts_vector) }
+  let(:ogp_line) { GeoCombine::OGP.new(ogp_harvard_line) }
+  let(:metadata) { ogp.instance_variable_get(:@metadata) }
+
+  describe '#initialize' do
+    it 'parses JSON into metadata Hash' do
+      expect(metadata).to be_an Hash
+    end
+  end
+
+  describe '#to_geoblacklight' do
+    it 'calls geoblacklight_terms to create a GeoBlacklight object' do
+      expect(ogp).to receive(:geoblacklight_terms).and_return({})
+      expect(ogp.to_geoblacklight).to be_an GeoCombine::Geoblacklight
+    end
+  end
+
+  describe '#geoblacklight_terms' do
+    describe 'builds a hash which maps metadata' do
+      it 'with dc_identifier_s' do
+        expect(ogp.geoblacklight_terms).to include(dc_identifier_s: 'HARVARD.SDE2.G1059_W57_1654_PF_SH1')
+      end
+      it 'with dc_title_s' do
+        expect(ogp.geoblacklight_terms).to include(dc_title_s: 'World Map, 1654 (Raster Image)')
+      end
+      it 'with dc_description_s sanitized' do
+        expect(ogp.geoblacklight_terms).to include(dc_description_s: metadata['Abstract'])
+      end
+      it 'with dc_rights_s' do
+        expect(ogp.geoblacklight_terms).to include(dc_rights_s: 'Public')
+        expect(ogp_line.geoblacklight_terms).to include(dc_rights_s: 'Restricted')
+      end
+      it 'with dct_provenance_s' do
+        expect(ogp.geoblacklight_terms).to include(dct_provenance_s: 'Harvard')
+      end
+      it 'with dct_references_s' do
+        expect(ogp.geoblacklight_terms).to include(:dct_references_s)
+      end
+      it 'with layer_id_s that is blank' do
+        expect(ogp.geoblacklight_terms)
+          .to include(layer_id_s: "#{metadata['WorkspaceName']}:#{metadata['Name']}")
+      end
+      it 'with layer_geom_type_s' do
+        expect(ogp.geoblacklight_terms).to include(:layer_geom_type_s)
+      end
+      it 'with layer_slug_s' do
+        expect(ogp.geoblacklight_terms)
+          .to include(layer_slug_s: 'harvard-g1059-w57-1654-pf-sh1')
+      end
+      it 'with solr_geom' do
+        expect(ogp.geoblacklight_terms).to include(:solr_geom)
+      end
+      it 'with dc_subject_sm' do
+        expect(ogp.geoblacklight_terms).to include(
+          dc_subject_sm: [
+            'Maps', 'Human settlements', 'Cities and towns', 'Villages',
+            'Bodies of water', 'Landforms', 'Transportation',
+            'imageryBaseMapsEarthCover'
+          ]
+        )
+      end
+      it 'with dct_spatial_sm' do
+        expect(ogp.geoblacklight_terms).to include(
+          dct_spatial_sm: [
+            'Earth', 'Northern Hemisphere', 'Southern Hemisphere',
+            'Eastern Hemisphere', 'Western Hemisphere', 'Africa', 'Asia',
+            'Australia', 'Europe', 'North America', 'South America',
+            'Arctic regions'
+          ]
+        )
+      end
+    end
+  end
+
+  describe '#ogp_geom' do
+    it 'when Paper Map use Raster' do
+      expect(ogp.ogp_geom).to eq 'Raster'
+    end
+    it 'anything else, return it' do
+      expect(ogp_tufts.ogp_geom).to eq 'Polygon'
+    end
+  end
+
+  describe '#ogp_formats' do
+    context 'when Paper Map or Raster' do
+      it 'returns GeoTIFF' do
+        %w[Raster Paper\ Map].each do |datatype|
+          expect(ogp).to receive(:metadata).and_return('DataType' => datatype)
+          expect(ogp.ogp_formats).to eq 'GeoTIFF'
+        end
+
+      end
+    end
+    context 'when Polygon, Line, or Point' do
+      it 'returns Shapefile' do
+        %w[Polygon Line Point].each do |datatype|
+          expect(ogp).to receive(:metadata).and_return('DataType' => datatype)
+          expect(ogp.ogp_formats).to eq 'Shapefile'
+        end
+      end
+    end
+    context 'unknown data types' do
+      it 'throws exception' do
+        expect(ogp).to receive(:metadata).twice.and_return('DataType' => 'Unknown')
+        expect { ogp.ogp_formats }.to raise_error(ArgumentError, 'Unknown')
+      end
+    end
+  end
+
+  describe '#envelope' do
+    it 'properly formatted envelope' do
+      expect(ogp.envelope).to eq 'ENVELOPE(-180.0, 180.0, 90.0, -90.0)'
+    end
+    it 'fails on out-of-bounds envelopes' do
+      expect(ogp).to receive(:west).and_return(-181)
+      expect { ogp.envelope }.to raise_error(ArgumentError)
+    end
+  end
+
+  describe '#references' do
+    context 'harvard raster' do
+      it 'has wms and download services' do
+        expect(JSON.parse(ogp.references)).to include(
+          'http://www.opengis.net/def/serviceType/ogc/wms' => 'http://pelham.lib.harvard.edu:8080/geoserver/wms',
+          'http://schema.org/DownloadAction' => 'http://pelham.lib.harvard.edu:8080/HGL/HGLOpenDelivery'
+        )
+      end
+    end
+    context 'tufts vector' do
+      it 'has wms wfs services' do
+        expect(JSON.parse(ogp_tufts.references)).to include(
+          'http://www.opengis.net/def/serviceType/ogc/wms' => 'http://geoserver01.uit.tufts.edu/wms',
+          'http://www.opengis.net/def/serviceType/ogc/wfs' => 'http://geoserver01.uit.tufts.edu/wfs'
+        )
+      end
+    end
+    context 'harvard line' do
+      it 'has restricted services' do
+        expect(JSON.parse(ogp_line.references)).to include(
+          'http://www.opengis.net/def/serviceType/ogc/wfs' => 'http://hgl.harvard.edu:8080/geoserver/wfs',
+          'http://www.opengis.net/def/serviceType/ogc/wms' => 'http://hgl.harvard.edu:8080/geoserver/wms'
+        )
+        expect(JSON.parse(ogp_line.references)).not_to include('http://schema.org/DownloadAction')
+      end
+    end
+  end
+
+  describe 'valid geoblacklight schema' do
+    context 'harvard' do
+      it { expect { ogp.to_geoblacklight.valid? }.to_not raise_error }
+      it { expect { ogp_line.to_geoblacklight.valid? }.to_not raise_error }
+    end
+    context 'tufts' do
+      it { expect { ogp_tufts.to_geoblacklight.valid? }.to_not raise_error }
+    end
+  end
+end
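The new OGP spec rounds out the release: it crosswalks an OpenGeoPortal Solr document to GeoBlacklight, deriving rights, provenance, geometry type, formats, the ENVELOPE string, and service references from the OGP fields. A minimal usage sketch; the fixture path is hypothetical:

```ruby
require 'geo_combine'

ogp = GeoCombine::OGP.new(File.read('ogp_record.json')) # hypothetical OGP JSON document
record = ogp.to_geoblacklight

record.valid?       # validates against the GeoBlacklight schema, as the spec exercises
puts record.to_json # ready to index into a GeoBlacklight Solr core
```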