geo_combine 0.5.1 → 0.6.0
- checksums.yaml +4 -4
- data/.github/workflows/ruby.yml +53 -0
- data/.gitignore +2 -0
- data/.rubocop.yml +20 -0
- data/.rubocop_todo.yml +165 -0
- data/Gemfile +3 -1
- data/README.md +15 -1
- data/Rakefile +4 -2
- data/bin/geocombine +1 -0
- data/geo_combine.gemspec +5 -0
- data/lib/geo_combine/bounding_box.rb +7 -1
- data/lib/geo_combine/ckan_metadata.rb +10 -8
- data/lib/geo_combine/cli.rb +3 -1
- data/lib/geo_combine/esri_open_data.rb +2 -0
- data/lib/geo_combine/exceptions.rb +3 -0
- data/lib/geo_combine/fgdc.rb +2 -2
- data/lib/geo_combine/formats.rb +2 -0
- data/lib/geo_combine/formatting.rb +3 -1
- data/lib/geo_combine/geo_blacklight_harvester.rb +20 -13
- data/lib/geo_combine/geoblacklight.rb +20 -6
- data/lib/geo_combine/geometry_types.rb +2 -0
- data/lib/geo_combine/iso19139.rb +2 -1
- data/lib/geo_combine/ogp.rb +13 -11
- data/lib/geo_combine/railtie.rb +2 -0
- data/lib/geo_combine/subjects.rb +2 -0
- data/lib/geo_combine/version.rb +3 -1
- data/lib/geo_combine.rb +4 -3
- data/lib/tasks/geo_combine.rake +47 -26
- data/lib/xslt/fgdc2html.xsl +38 -9
- data/spec/features/fgdc2html_spec.rb +53 -1
- data/spec/features/iso2html_spec.rb +10 -1
- data/spec/fixtures/docs/princeton_fgdc.xml +374 -0
- data/spec/fixtures/docs/repos.json +3224 -0
- data/spec/fixtures/docs/simple_xml.xml +10 -0
- data/spec/fixtures/docs/simple_xslt.xsl +11 -0
- data/spec/fixtures/docs/stanford_iso.xml +652 -0
- data/spec/fixtures/docs/tufts_fgdc.xml +977 -0
- data/spec/fixtures/indexing/basic_geoblacklight.json +27 -0
- data/spec/fixtures/indexing/geoblacklight.json +33 -0
- data/spec/fixtures/indexing/layers.json +16119 -0
- data/spec/fixtures/indexing/test.txt +1 -0
- data/spec/fixtures/json_docs.rb +2 -0
- data/spec/fixtures/xml_docs.rb +9 -1659
- data/spec/helpers.rb +7 -7
- data/spec/lib/geo_combine/bounding_box_spec.rb +18 -0
- data/spec/lib/geo_combine/ckan_metadata_spec.rb +34 -11
- data/spec/lib/geo_combine/esri_open_data_spec.rb +23 -2
- data/spec/lib/geo_combine/fgdc_spec.rb +41 -10
- data/spec/lib/geo_combine/formatting_spec.rb +13 -5
- data/spec/lib/geo_combine/geo_blacklight_harvester_spec.rb +32 -28
- data/spec/lib/geo_combine/geoblacklight_spec.rb +41 -11
- data/spec/lib/geo_combine/iso19139_spec.rb +26 -14
- data/spec/lib/geo_combine/ogp_spec.rb +28 -8
- data/spec/lib/geo_combine_spec.rb +7 -4
- data/spec/lib/tasks/geo_combine_spec.rb +45 -0
- data/spec/spec_helper.rb +19 -84
- data/spec/support/fixtures.rb +9 -0
- metadata +102 -7
- data/.coveralls.yml +0 -1
- data/.travis.yml +0 -8
data/lib/geo_combine/geoblacklight.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'active_support/core_ext/object/blank'
 require 'active_support/core_ext/hash/except'
 require 'open-uri'
@@ -10,8 +12,8 @@ module GeoCombine
 
     attr_reader :metadata
 
-    GEOBLACKLIGHT_VERSION = '
-    SCHEMA_JSON_URL = "https://raw.githubusercontent.com/geoblacklight/geoblacklight
+    GEOBLACKLIGHT_VERSION = '1.0'
+    SCHEMA_JSON_URL = "https://raw.githubusercontent.com/geoblacklight/geoblacklight/main/schema/geoblacklight-schema-#{GEOBLACKLIGHT_VERSION}.json"
     DEPRECATED_KEYS_V1 = %w[
       uuid
       georss_polygon_s
@@ -29,7 +31,6 @@ module GeoCombine
     # @param [Hash] fields enhancements to metadata that are merged with @metadata
     def initialize(metadata, fields = {})
       @metadata = JSON.parse(metadata).merge(fields)
-      @schema = nil
     end
 
     ##
@@ -58,8 +59,7 @@ module GeoCombine
     # Validates a GeoBlacklight-Schema json document
     # @return [Boolean]
     def valid?
-
-      JSON::Validator.validate!(@schema, to_json, fragment: '#/properties/layer') &&
+      JSON::Validator.validate!(schema, to_json, fragment: '#/definitions/layer') &&
        dct_references_validate! &&
        spatial_validate!
     end
@@ -69,9 +69,14 @@ module GeoCombine
     # @return [Boolean]
     def dct_references_validate!
       return true unless metadata.key?('dct_references_s') # TODO: shouldn't we require this field?
+
       begin
         ref = JSON.parse(metadata['dct_references_s'])
-
+        unless ref.is_a?(Hash)
+          raise GeoCombine::Exceptions::InvalidDCTReferences,
+                'dct_references must be parsed to a Hash'
+        end
+
         true
       rescue JSON::ParserError => e
         raise e, "Invalid JSON in dct_references_s: #{e.message}"
@@ -89,6 +94,7 @@ module GeoCombine
     # GeoBlacklight-Schema format
     def translate_formats(key, value)
       return unless key == 'dc_format_s' && formats.include?(value)
+
       metadata[key] = formats[value]
     end
 
@@ -96,6 +102,7 @@ module GeoCombine
     # Enhances the 'layer_geom_type_s' field by translating from known types
     def translate_geometry_type(key, value)
       return unless key == 'layer_geom_type_s' && geometry_types.include?(value)
+
       metadata[key] = geometry_types[value]
     end
 
@@ -104,6 +111,7 @@ module GeoCombine
     # categories
     def enhance_subjects(key, value)
       return unless key == 'dc_subject_sm'
+
       metadata[key] = value.map do |val|
         if subjects.include?(val)
           subjects[val]
@@ -118,11 +126,13 @@ module GeoCombine
     # and ISO8601 (for indexing into Solr)
     def format_proper_date(key, value)
       return unless key == 'layer_modified_dt'
+
       metadata[key] = Time.parse(value).utc.iso8601
     end
 
     def fields_should_be_array(key, value)
       return unless should_be_array.include?(key) && !value.is_a?(Array)
+
       metadata[key] = [value]
     end
 
@@ -155,5 +165,9 @@ module GeoCombine
       # ensure we have a proper v1 record
       valid?
     end
+
+    def schema
+      @schema ||= JSON.parse(URI.open(SCHEMA_JSON_URL).read)
+    end
   end
 end
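Note: a minimal usage sketch of the stricter validation above (the record JSON is invented for illustration; only GeoCombine::Geoblacklight, #dct_references_validate!, and GeoCombine::Exceptions::InvalidDCTReferences come from this diff). dct_references_s must now parse to a JSON object; anything else raises instead of quietly validating:

    require 'geo_combine'

    # Hypothetical, abbreviated record: only the key needed to show the new check.
    record = GeoCombine::Geoblacklight.new('{"dct_references_s": "[\"not\", \"a\", \"hash\"]"}')

    begin
      record.dct_references_validate!
    rescue GeoCombine::Exceptions::InvalidDCTReferences => e
      e.message # => "dct_references must be parsed to a Hash"
    end

The schema is also fetched lazily now: #valid? calls the new #schema helper, which downloads and memoizes the version 1.0 schema, instead of relying on @schema set up in the constructor.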
data/lib/geo_combine/iso19139.rb CHANGED
data/lib/geo_combine/ogp.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'active_support/core_ext/object/blank'
 require 'cgi'
 
@@ -77,15 +79,13 @@ module GeoCombine
     end
 
     def date
-
-
-
-      nil
-    end
+      DateTime.rfc3339(metadata['ContentDate'])
+    rescue StandardError
+      nil
     end
 
     def year
-      date
+      date&.year
     end
 
     ##
@@ -104,9 +104,9 @@ module GeoCombine
     def ogp_formats
       case metadata['DataType']
       when 'Paper Map', 'Raster'
-
+        'GeoTIFF'
       when 'Polygon', 'Point', 'Line'
-
+        'Shapefile'
       else
         raise ArgumentError, metadata['DataType']
       end
@@ -128,6 +128,7 @@ module GeoCombine
         north >= -90 && north <= 90 &&
         south >= -90 && south <= 90 &&
         west <= east && south <= north
+
       "ENVELOPE(#{west}, #{east}, #{north}, #{south})"
     end
 
@@ -165,6 +166,7 @@ module GeoCombine
 
     def download_uri
       return 'http://schema.org/DownloadAction' if institution == 'Harvard'
+
       'http://schema.org/downloadUrl'
     end
 
@@ -205,7 +207,7 @@ module GeoCombine
       sluggify(filter_name(name))
     end
 
-
+    SLUG_STRIP_VALUES = %w[
       SDE_DATA.
       SDE.
      SDE2.
@@ -216,8 +218,8 @@ module GeoCombine
 
     def filter_name(name)
       # strip out schema and usernames
-
-      name.sub!(
+      SLUG_STRIP_VALUES.each do |strip_val|
+        name.sub!(strip_val, '')
       end
       unless name.size > 1
         # use first word of title is empty name
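Note: the rewritten OGP date handling reduces to the pattern below (a standalone sketch, not the gem's API; date_for is a hypothetical stand-in for #date, which reads metadata['ContentDate']):

    require 'date'

    # Parse an RFC 3339 timestamp, returning nil when the value is missing or
    # malformed, mirroring the rescue StandardError branch added above.
    def date_for(content_date)
      DateTime.rfc3339(content_date)
    rescue StandardError
      nil
    end

    date_for('2007-01-01T00:00:00Z')&.year # => 2007
    date_for(nil)&.year                    # => nil

#year then uses safe navigation (date&.year), so records without a usable ContentDate yield nil rather than raising.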
data/lib/geo_combine/railtie.rb CHANGED
data/lib/geo_combine/subjects.rb CHANGED
data/lib/geo_combine/version.rb CHANGED
data/lib/geo_combine.rb CHANGED
@@ -1,10 +1,11 @@
+# frozen_string_literal: true
+
 require 'nokogiri'
 require 'json'
 require 'json-schema'
 require 'sanitize'
 
 module GeoCombine
-
   ##
   # TODO: Create a parse method that can interpret the type of metadata being
   # passed in.
@@ -23,7 +24,7 @@ module GeoCombine
     # @param [String] metadata can be a File path
     # "./tmp/edu.stanford.purl/bb/338/jh/0716/iso19139.xml" or a String of XML
     # metadata
-    def initialize
+    def initialize(metadata)
       metadata = File.read metadata if File.readable? metadata
       metadata = Nokogiri::XML(metadata) if metadata.instance_of? String
       @metadata = metadata
@@ -35,7 +36,7 @@ module GeoCombine
    # GeoCombine::Geoblacklight on its instantiation
    # @return [GeoCombine::Geoblacklight] the data transformed into
    # geoblacklight schema, returned as a GeoCombine::Geoblacklight
-    def to_geoblacklight
+    def to_geoblacklight(fields = {})
       GeoCombine::Geoblacklight.new(xsl_geoblacklight.apply_to(@metadata), fields)
     end
 
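Note: #to_geoblacklight now takes the enhancement fields hash directly and forwards it to GeoCombine::Geoblacklight.new. A sketch reusing the ISO 19139 example path from the doc comment above (the field value is invented for illustration):

    require 'geo_combine'

    iso = GeoCombine::Iso19139.new('./tmp/edu.stanford.purl/bb/338/jh/0716/iso19139.xml')
    record = iso.to_geoblacklight('layer_geom_type_s' => 'Polygon')
    record.to_json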
data/lib/tasks/geo_combine.rake CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'net/http'
 require 'json'
 require 'rsolr'
@@ -5,13 +7,6 @@ require 'find'
 require 'geo_combine/geo_blacklight_harvester'
 
 namespace :geocombine do
-  commit_within = (ENV['SOLR_COMMIT_WITHIN'] || 5000).to_i
-  ogm_path = ENV['OGM_PATH'] || 'tmp/opengeometadata'
-  solr_url = ENV['SOLR_URL'] || 'http://127.0.0.1:8983/solr/blacklight-core'
-  whitelist = %w[
-    https://github.com/OpenGeoMetadata/big-ten.git
-  ]
-
   desc 'Clone OpenGeoMetadata repositories'
   task :clone, [:repo] do |_t, args|
     if args.repo
@@ -19,44 +14,44 @@ namespace :geocombine do
     else
       ogm_api_uri = URI('https://api.github.com/orgs/opengeometadata/repos')
       ogm_repos = JSON.parse(Net::HTTP.get(ogm_api_uri)).map do |repo|
-        repo['clone_url'] if repo['size']
+        repo['clone_url'] if (repo['size']).positive?
       end.compact
-      ogm_repos.
+      ogm_repos.reject! { |repo| GeoCombineRake.denylist.include?(repo) }
     end
     ogm_repos.each do |repo|
-      system "echo #{repo} && mkdir -p #{ogm_path} && cd #{ogm_path} && git clone --depth 1 #{repo}"
+      Kernel.system "echo #{repo} && mkdir -p #{GeoCombineRake.ogm_path} && cd #{GeoCombineRake.ogm_path} && git clone --depth 1 #{repo}"
     end
   end
 
   desc '"git pull" OpenGeoMetadata repositories'
   task :pull, [:repo] do |_t, args|
     paths = if args.repo
-              [File.join(ogm_path, args.repo)]
+              [File.join(GeoCombineRake.ogm_path, args.repo)]
             else
-              Dir.glob("#{ogm_path}/*")
+              Dir.glob("#{GeoCombineRake.ogm_path}/*")
             end
     paths.each do |path|
       next unless File.directory?(path)
-
+
+      Kernel.system "echo #{path} && cd #{path} && git pull origin"
     end
   end
 
-  desc 'Index all
+  desc 'Index all JSON documents except Layers.json'
   task :index do
-    puts "Indexing #{ogm_path} into #{solr_url}"
-    solr = RSolr.connect url: solr_url, adapter: :net_http_persistent
-    Find.find(ogm_path) do |path|
-      next unless File.basename(path)
+    puts "Indexing #{GeoCombineRake.ogm_path} into #{GeoCombineRake.solr_url}"
+    solr = RSolr.connect url: GeoCombineRake.solr_url, adapter: :net_http_persistent
+    Find.find(GeoCombineRake.ogm_path) do |path|
+      next unless File.basename(path).include?('.json') && File.basename(path) != 'layers.json'
+
       doc = JSON.parse(File.read(path))
       [doc].flatten.each do |record|
-
-
-
-
-
-
-        puts error
-      end
+        puts "Indexing #{record['layer_slug_s']}: #{path}" if $DEBUG
+        solr.update params: { commitWithin: GeoCombineRake.commit_within, overwrite: true },
+                    data: [record].to_json,
+                    headers: { 'Content-Type' => 'application/json' }
+      rescue RSolr::Error::Http => e
+        puts e
      end
    end
    solr.commit
@@ -71,3 +66,29 @@ namespace :geocombine do
     end
   end
 end
+
+# Class to hold helper methods for use in GeoCombine rake tasks
+class GeoCombineRake
+  def self.commit_within
+    (ENV['SOLR_COMMIT_WITHIN'] || 5000).to_i
+  end
+
+  def self.denylist
+    [
+      'https://github.com/OpenGeoMetadata/GeoCombine.git',
+      'https://github.com/OpenGeoMetadata/aardvark.git',
+      'https://github.com/OpenGeoMetadata/metadata-issues.git',
+      'https://github.com/OpenGeoMetadata/ogm_utils-python.git',
+      'https://github.com/OpenGeoMetadata/opengeometadata.github.io.git',
+      'https://github.com/OpenGeoMetadata/opengeometadata-rails.git'
+    ]
+  end
+
+  def self.ogm_path
+    ENV['OGM_PATH'] || 'tmp/opengeometadata'
+  end
+
+  def self.solr_url
+    ENV['SOLR_URL'] || 'http://127.0.0.1:8983/solr/blacklight-core'
+  end
+end
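Note: the task-local commit_within/ogm_path/solr_url variables and the old whitelist are replaced by the GeoCombineRake helpers defined at the bottom of the file. A sketch of how they resolve once lib/tasks/geo_combine.rake is loaded (the values shown are the defaults from this diff):

    ENV.delete('SOLR_URL')
    ENV.delete('SOLR_COMMIT_WITHIN')
    ENV['OGM_PATH'] = 'tmp/opengeometadata'

    GeoCombineRake.ogm_path       # => "tmp/opengeometadata"
    GeoCombineRake.solr_url       # => "http://127.0.0.1:8983/solr/blacklight-core"
    GeoCombineRake.commit_within  # => 5000
    GeoCombineRake.denylist.first # => "https://github.com/OpenGeoMetadata/GeoCombine.git"

The :index task also now only indexes .json files, skips layers.json, and rescues RSolr::Error::Http per record so a bad document does not stop the run.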
data/lib/xslt/fgdc2html.xsl CHANGED
@@ -93,7 +93,7 @@
 <xsl:value-of select="." />
 </dd>
 </xsl:for-each>
-<xsl:for-each select="
+<xsl:for-each select="descript/supplinf">
 <dt>Supplemental Information</dt>
 <dd>
 <xsl:value-of select="." />
@@ -204,6 +204,22 @@
 </dd>
 </xsl:if>
 </xsl:for-each>
+<dt>Temporal Keyword</dt>
+<xsl:for-each select="keywords/temporal/tempkey">
+<dd>
+<xsl:value-of select="." />
+</dd>
+<xsl:if test="position()=last()">
+<dd>
+<dl>
+<dt>Temporal Keyword Thesaurus</dt>
+<dd>
+<xsl:value-of select="ancestor-or-self::*/tempkt" />
+</dd>
+</dl>
+</dd>
+</xsl:if>
+</xsl:for-each>
 <xsl:for-each select="accconst">
 <dt>Access Restrictions</dt>
 <dd>
@@ -234,6 +250,9 @@
 <dl>
 <xsl:for-each select="cntinfo/cntperp/cntper">
 <dt>Contact Person</dt>
+<dd>
+<xsl:value-of select="." />
+</dd>
 </xsl:for-each>
 <xsl:for-each select="cntinfo/cntorgp/cntorg">
 <dt>Contact Organization</dt>
@@ -277,31 +296,31 @@
 <xsl:value-of select="." />
 </dd>
 </xsl:for-each>
-<xsl:for-each select="cntvoice">
+<xsl:for-each select="cntinfo/cntvoice">
 <dt>Contact Telephone</dt>
 <dd>
 <xsl:value-of select="." />
 </dd>
 </xsl:for-each>
-<xsl:for-each select="cntfax">
+<xsl:for-each select="cntinfo/cntfax">
 <dt>Contact Facsimile Telephone</dt>
 <dd>
 <xsl:value-of select="." />
 </dd>
 </xsl:for-each>
-<xsl:for-each select="cntemail">
+<xsl:for-each select="cntinfo/cntemail">
 <dt>Contact Electronic Mail Address</dt>
 <dd>
 <xsl:value-of select="." />
 </dd>
 </xsl:for-each>
-<xsl:for-each select="hours">
+<xsl:for-each select="cntinfo/hours">
 <dt>Hours of Service</dt>
 <dd>
 <xsl:value-of select="." />
 </dd>
 </xsl:for-each>
-<xsl:for-each select="cntinst">
+<xsl:for-each select="cntfo/cntinst">
 <dt>Contact Instructions</dt>
 <dd>
 <xsl:value-of select="." />
@@ -475,6 +494,12 @@
 </dl>
 </dd>
 </xsl:for-each>
+<xsl:for-each select="srccontr">
+<dt>Contribution</dt>
+<dd>
+<xsl:value-of select="." />
+</dd>
+</xsl:for-each>
 </dl>
 <xsl:for-each select="procstep">
 <dt>Process Step</dt>
@@ -1186,6 +1211,12 @@
 <xsl:value-of select="attrdef" />
 <xsl:apply-templates select="attrdomv" />
 <dl>
+<xsl:for-each select="attrdefs">
+<dt>Definition Source</dt>
+<dd>
+<xsl:value-of select="." />
+</dd>
+</xsl:for-each>
 <xsl:for-each select="begdatea">
 <dt>Beginning Date of Attribute Values</dt>
 <dd>
@@ -1234,9 +1265,7 @@
 </xsl:template>
 
 <xsl:template match="attrdomv[edom]">
-<
-<button onclick="this.nextElementSibling.style.display = (this.nextElementSibling.style.display==='none') ? '' : 'none';">show/hide coded values</button>
-<dl style="display:none">
+<dl>
 <xsl:for-each select="edom">
 <dt><xsl:value-of select="edomv" /></dt>
 <dd><xsl:value-of select="edomvd" /></dd>
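Note: the new Temporal Keyword, Contribution, and Definition Source sections can be exercised end to end through GeoCombine::Fgdc#to_html (a sketch; the fixture path assumes a checkout of the gem, and princeton_fgdc.xml is the fixture added in this release):

    require 'geo_combine'

    html = GeoCombine::Fgdc.new('spec/fixtures/docs/princeton_fgdc.xml').to_html
    html.include?('Temporal Keyword')   # => true, from the new tempkey loop
    html.include?('Definition Source')  # => true, from the new attrdefs loop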
data/spec/features/fgdc2html_spec.rb CHANGED
@@ -1,9 +1,12 @@
+# frozen_string_literal: true
+
 require 'spec_helper'
 
-#TODO Provide additional expectations on html structure
+# TODO: Provide additional expectations on html structure
 describe 'FGDC to html' do
   include XmlDocs
   let(:page) { GeoCombine::Fgdc.new(tufts_fgdc).to_html }
+
   describe 'Identification Information' do
     it 'has sections' do
       expect(page).to have_tag '#fgdc-identification-info' do
@@ -14,29 +17,78 @@ describe 'FGDC to html' do
       end
     end
   end
+
   describe 'Data Quality Information' do
     it 'has sections' do
       expect(page).to have_tag '#fgdc-data-quality-info'
     end
   end
+
   describe 'Spatial Data Organization Information' do
     it 'has sections' do
       expect(page).to have_tag '#fgdc-spatialdataorganization-info'
     end
   end
+
   describe 'Entity and Attribute Information' do
     it 'has sections' do
       expect(page).to have_tag '#fgdc-spatialreference-info'
     end
   end
+
   describe 'Distribution Information' do
     it 'has sections' do
       expect(page).to have_tag '#fgdc-distribution-info'
     end
   end
+
   describe 'Metadata Reference Information' do
     it 'has sections' do
       expect(page).to have_tag '#fgdc-metadata-reference-info'
     end
   end
+
+  describe 'Point of Contact' do
+    it 'has contact info' do
+      expect(page).to have_tag '#fgdc-identification-info'
+    end
+  end
+
+  context 'with fgdc metadata from another institution' do
+    let(:page) { GeoCombine::Fgdc.new(princeton_fgdc).to_html }
+
+    it 'has temporal keywords' do
+      expect(page).to have_tag 'dt', text: 'Temporal Keyword'
+      expect(page).to have_tag 'dd', text: '2030'
+    end
+
+    it 'has supplemental information' do
+      expect(page).to have_tag 'dt', text: 'Supplemental Information'
+      expect(page).to have_tag 'dd', text: /The E\+ scenario/
+    end
+
+    it 'has a contact person' do
+      expect(page).to have_tag 'dt', text: 'Contact Person'
+      expect(page).to have_tag 'dd', text: 'Andrew Pascale'
+    end
+
+    it 'has a contact telephone' do
+      expect(page).to have_tag 'dt', text: 'Contact Telephone'
+      expect(page).to have_tag 'dd', text: '609-258-1097'
+    end
+
+    it 'has an attribute description source and a list of values' do
+      expect(page).to have_tag 'dt', text: 'Definition Source'
+      expect(page).to have_tag 'dd', text: 'Andlinger Center/HMEI'
+
+      # Attribute elements are show by default
+      expect(page).not_to have_tag 'button'
+      expect(page).to have_tag 'dd', text: 'Desert Southwest'
+    end
+
+    it 'has a attribute source contribution' do
+      expect(page).to have_tag 'dt', text: 'Contribution'
+      expect(page).to have_tag 'dd', text: 'Net-Zero America report, 2020'
+    end
+  end
 end
data/spec/features/iso2html_spec.rb CHANGED
@@ -1,9 +1,12 @@
+# frozen_string_literal: true
+
 require 'spec_helper'
 
-#TODO Provide additional expectations on html structure
+# TODO: Provide additional expectations on html structure
 describe 'ISO 19139 to html' do
   include XmlDocs
   let(:page) { GeoCombine::Iso19139.new(stanford_iso).to_html }
+
   describe 'Identification Information' do
     it 'has sections' do
       expect(page).to have_tag '#iso-identification-info' do
@@ -17,36 +20,42 @@ describe 'ISO 19139 to html' do
       end
     end
   end
+
   describe 'Spatial Reference Information' do
     it 'has sections' do
       expect(page).to have_tag '#iso-spatial-reference-info'
       expect(page).to have_tag 'h2', text: 'Identification Information'
     end
   end
+
   describe 'Data Quality Information' do
     it 'has sections' do
       expect(page).to have_tag '#iso-data-quality-info'
       expect(page).to have_tag 'h2', text: 'Data Quality Information'
     end
   end
+
   describe 'Distribution Information' do
     it 'has sections' do
       expect(page).to have_tag '#iso-distribution-info'
       expect(page).to have_tag 'h2', text: 'Distribution Information'
     end
   end
+
   describe 'Content Information' do
     it 'has sections' do
       expect(page).to have_tag '#iso-content-info'
       expect(page).to have_tag 'h2', text: 'Content Information'
     end
   end
+
   describe 'Spatial Representation Information' do
     it 'has sections' do
       expect(page).to have_tag '#iso-spatial-representation-info'
       expect(page).to have_tag 'h2', text: 'Spatial Representation Information'
     end
   end
+
   describe 'Metadata Reference Information' do
     it 'has sections' do
       expect(page).to have_tag '#iso-metadata-reference-info'