rdf-vocab 3.1.2 → 3.1.8
- checksums.yaml +4 -4
- data/LICENSE +1 -1
- data/README.md +69 -45
- data/VERSION +1 -1
- data/lib/rdf/vocab.rb +60 -40
- data/lib/rdf/vocab/acl.rb +104 -61
- data/lib/rdf/vocab/as.rb +556 -91
- data/lib/rdf/vocab/bf2.rb +1791 -475
- data/lib/rdf/vocab/bibframe.rb +1 -2
- data/lib/rdf/vocab/bibo.rb +616 -124
- data/lib/rdf/vocab/cc.rb +125 -48
- data/lib/rdf/vocab/cert.rb +68 -1
- data/lib/rdf/vocab/cnt.rb +79 -1
- data/lib/rdf/vocab/crm.rb +1543 -776
- data/lib/rdf/vocab/datacite.rb +1 -1
- data/lib/rdf/vocab/dbo.rb +10463 -1
- data/lib/rdf/vocab/dc.rb +485 -99
- data/lib/rdf/vocab/dc11.rb +93 -31
- data/lib/rdf/vocab/dcat.rb +236 -88
- data/lib/rdf/vocab/dcmitype.rb +63 -13
- data/lib/rdf/vocab/disco.rb +286 -59
- data/lib/rdf/vocab/doap.rb +175 -1
- data/lib/rdf/vocab/dwc.rb +941 -189
- data/lib/rdf/vocab/earl.rb +324 -0
- data/lib/rdf/vocab/ebucore.rb +7255 -2184
- data/lib/rdf/vocab/edm.rb +202 -76
- data/lib/rdf/vocab/exif.rb +650 -1
- data/lib/rdf/vocab/extensions.rb +257 -203
- data/lib/rdf/vocab/fcrepo4.rb +398 -22
- data/lib/rdf/vocab/foaf.rb +380 -76
- data/lib/rdf/vocab/geo.rb +47 -110
- data/lib/rdf/vocab/geojson.rb +82 -17
- data/lib/rdf/vocab/geonames.rb +1508 -1351
- data/lib/rdf/vocab/gr.rb +861 -565
- data/lib/rdf/vocab/gs1.rb +1597 -1
- data/lib/rdf/vocab/ht.rb +204 -41
- data/lib/rdf/vocab/hydra.rb +335 -60
- data/lib/rdf/vocab/iana.rb +301 -114
- data/lib/rdf/vocab/ical.rb +534 -121
- data/lib/rdf/vocab/identifiers.rb +459 -499
- data/lib/rdf/vocab/iiif.rb +161 -26
- data/lib/rdf/vocab/jsonld.rb +179 -53
- data/lib/rdf/vocab/ldp.rb +130 -1
- data/lib/rdf/vocab/lrmi.rb +85 -17
- data/lib/rdf/vocab/ma.rb +460 -80
- data/lib/rdf/vocab/mads.rb +610 -22
- data/lib/rdf/vocab/{marc_relators.rb → marcrelators.rb} +813 -4
- data/lib/rdf/vocab/mo.rb +1115 -540
- data/lib/rdf/vocab/mods.rb +635 -124
- data/lib/rdf/vocab/nfo.rb +1 -1
- data/lib/rdf/vocab/oa.rb +343 -73
- data/lib/rdf/vocab/og.rb +100 -1
- data/lib/rdf/vocab/ogc.rb +42 -9
- data/lib/rdf/vocab/ore.rb +69 -14
- data/lib/rdf/vocab/org.rb +235 -47
- data/lib/rdf/vocab/pcdm.rb +58 -34
- data/lib/rdf/vocab/pplan.rb +21 -136
- data/lib/rdf/vocab/premis.rb +1171 -779
- data/lib/rdf/vocab/{premis_event_type.rb → premiseventtype.rb} +156 -2
- data/lib/rdf/vocab/prov.rb +700 -107
- data/lib/rdf/vocab/ptr.rb +172 -37
- data/lib/rdf/vocab/rightsstatements.rb +17 -13
- data/lib/rdf/vocab/rsa.rb +30 -1
- data/lib/rdf/vocab/rss.rb +52 -11
- data/lib/rdf/vocab/schema.rb +14970 -3993
- data/lib/rdf/vocab/schemas.rb +27066 -0
- data/lib/rdf/vocab/sd.rb +372 -0
- data/lib/rdf/vocab/sh.rb +919 -184
- data/lib/rdf/vocab/sioc.rb +498 -98
- data/lib/rdf/vocab/{sioc_services.rb → siocservices.rb} +46 -9
- data/lib/rdf/vocab/sioctypes.rb +434 -0
- data/lib/rdf/vocab/skos.rb +160 -44
- data/lib/rdf/vocab/skosxl.rb +40 -11
- data/lib/rdf/vocab/v.rb +323 -37
- data/lib/rdf/vocab/vcard.rb +625 -119
- data/lib/rdf/vocab/vmd.rb +323 -37
- data/lib/rdf/vocab/void.rb +147 -27
- data/lib/rdf/vocab/vs.rb +23 -5
- data/lib/rdf/vocab/wdrs.rb +112 -23
- data/lib/rdf/vocab/wot.rb +96 -19
- data/lib/rdf/vocab/xhtml.rb +2 -1
- data/lib/rdf/vocab/xhv.rb +422 -201
- data/lib/rdf/vocab/xkos.rb +156 -7
- data/spec/extensions_spec.rb +68 -0
- data/spec/vocab_spec.rb +10 -0
- metadata +20 -11
- data/lib/rdf/vocab/sioct.rb +0 -277
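Since this release spans several patch versions, here is a minimal sketch for consumers picking it up via Bundler (the Gemfile constraint and the DC example term are illustrative, not part of this changeset):

    # Gemfile
    gem 'rdf-vocab', '>= 3.1.8'

    # application code
    require 'rdf/vocab'
    RDF::Vocab::DC.title   # => the term <http://purl.org/dc/terms/title>

The renamed vocabulary files listed above (for example marc_relators.rb → marcrelators.rb) are reached through the RDF::Vocab::VOCABS autoload map shown in the extensions.rb diff below, so code that goes through the RDF::Vocab constants should not need require changes.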
data/lib/rdf/vocab/extensions.rb
CHANGED
@@ -21,7 +21,7 @@ module RDF
         # Ruby's autoloading facility, meaning that `@@subclasses` will be
         # empty until each subclass has been touched or require'd.
         RDF::Vocab::VOCABS.each do |n, params|
-          clsname = params
+          clsname = params[:class_name].to_sym
           RDF::Vocab.const_get(clsname) # Forces class to load
         end unless @classes_loaded
         @classes_loaded = true
@@ -29,236 +29,290 @@ module RDF
         _orig_each(&block)
       end

-
+      ##
+      # A hash of all vocabularies by prefix showing relevant URI and
+      # associated vocabulary Class Name
+      #
+      # @return [Hash{Symbol => Hash{Symbol => String}}]
+      #alias_method :_orig_vocab_map, :vocab_map
+      def vocab_map
+        @vocab_map ||= RDF::VOCABS.transform_values(&:freeze).merge(
+          RDF::Vocab::VOCABS.transform_values(&:freeze))
+      end
+
+      ##
+      # Return the vocabulary based on it's class_name symbol
+      #
+      # @param [Symbol] sym
+      # @return [RDF::Vocabulary]
+      alias_method :_orig_from_sym, :from_sym
+      def from_sym(sym)
+        RDF::Vocab.const_defined?(sym.to_sym) ?
+          RDF::Vocab.const_get(sym.to_sym) : _orig_from_sym(sym)
+      end
+
+      ##
+      # Limits iteration over vocabularies to just those selected
+      #
+      # @example limit to set of vocabularies by symbol
+      #   RDF::Vocabulary.limit_vocabs(:rdf, :rdfs, :schema)
+      #   RDF::Vocabulary.find_term('http://schema.org/CreativeWork').pname
+      #   # => 'schema:CreativeWork'
+      #
+      # @example limit to set of vocabularies by class name
+      #   RDF::Vocabulary.limit_vocabs(RDF::RDFV, RDF::RDFS, RDF::Vocab::SCHEMA)
+      #   RDF::Vocabulary.find_term('http://schema.org/CreativeWork').pname
+      #   # => 'schema:CreativeWork'
+      #
+      # @param [Array<symbol, RDF::Vocabulary>] vocabs
+      #   A list of vocabularies (symbols or classes) which may
+      #   be returned by {Vocabulary.each}. Also limits
+      #   vocabularies that will be inspeced for other methods.
+      #   Set to nil, or an empty array to reset.
+      # @return [Array<RDF::Vocabulary>]
+      def limit_vocabs(*vocabs)
+        @vocabs = if Array(vocabs).empty?
+          nil
+        else
+          @classes_loaded = true
+          vocabs.map do |vocab|
+            if vocab == :rdf || vocab == :rdfv
+              RDF::RDFV
+            elsif vocab.is_a?(Symbol) && RDF::Vocab::VOCABS.key?(vocab)
+              RDF::Vocab.const_get(RDF::Vocab::VOCABS[vocab][:class_name].to_sym)
+            else
+              vocab
+            end
+          end.compact
+        end
+      end
+
+      ##
+      # Generate Turtle representation, specific to vocabularies
+      #
+      # @param [RDF::Queryable] :graph Optional graph, otherwise uses statements from vocabulary.
+      # @param [Hash{#to_sym => String}] Prefixes to add to output
+      # @return [String]
+      def to_ttl(graph: nil, prefixes: nil)
         require 'rdf/turtle'
-
-      # Generate Turtle representation, specific to vocabularies
-      #
-      # @param [RDF::Queryable] :graph Optional graph, otherwise uses statements from vocabulary.
-      # @param [Hash{#to_sym => String}] Prefixes to add to output
-      # @return [String]
-      def to_ttl(graph: nil, prefixes: nil)
-        output = []
-
-        # Find namespaces used in the vocabulary
-        graph = RDF::Graph.new {|g| each_statement {|s| g << s}} if graph.nil? || graph.empty?
-
-        prefixes = vocab_prefixes(graph).merge(prefixes || {})
-        pfx_width = prefixes.keys.map(&:to_s).map(&:length).max
-        prefixes.each do |pfx, uri|
-          output << "@prefix %*s: <%s> .\n" % [pfx_width, pfx, uri]
-        end
+        output = []

-
-
-
-        writer = RDF::Turtle::Writer.new(StringIO.new, prefixes: prefixes)
-
-        {
-          ont: {
-            heading: "# #{__name__.split('::').last} Vocabulary definition\n"
-          },
-          classes: {
-            heading: "# Class definitions\n"
-          },
-          properties: {
-            heading: "# Property definitions\n"
-          },
-          datatypes: {
-            heading: "# Datatype definitions\n"
-          },
-          other: {
-            heading: "# Other definitions\n"
-          }
-        }.each do |key, hash|
-          next unless cats[key]
-
-          output << "\n\n#{hash[:heading]}"
-
-          cats[key].each do |subject|
-            po = {}
-
-            # Group predicates with their values
-            graph.query(subject: subject) do |statement|
-              # Sanity check this, as these are set to an empty string if not defined.
-              next if [RDF::RDFS.label, RDF::RDFS.comment].include?(statement.predicate) && statement.object.to_s.empty?
-              po[statement.predicate] ||= []
-              po[statement.predicate] << statement.object
-            end
+        # Find namespaces used in the vocabulary
+        graph = RDF::Graph.new {|g| each_statement {|s| g << s}} if graph.nil? || graph.empty?

-
+        prefixes = vocab_prefixes(graph).merge(prefixes || {})
+        pfx_width = prefixes.keys.map(&:to_s).map(&:length).max
+        prefixes.each do |pfx, uri|
+          output << "@prefix %*s: <%s> .\n" % [pfx_width, pfx, uri]
+        end

-
-
-
-
+        # Determine the category for each subject in the vocabulary graph
+        cats = subject_categories(graph)
+
+        writer = RDF::Turtle::Writer.new(StringIO.new, prefixes: prefixes)
+
+        {
+          ont: {
+            heading: "# #{__name__.split('::').last} Vocabulary definition\n"
+          },
+          classes: {
+            heading: "# Class definitions\n"
+          },
+          properties: {
+            heading: "# Property definitions\n"
+          },
+          datatypes: {
+            heading: "# Datatype definitions\n"
+          },
+          other: {
+            heading: "# Other definitions\n"
+          }
+        }.each do |key, hash|
+          next unless cats[key]

-
-            po.each do |predicate, objects|
-              resource = predicate.qname ? predicate.pname : "<#{predicate}>"
-              po_list << resource + ' ' + objects.map {|o| writer.format_term(o)}.join(", ")
-            end
+          output << "\n\n#{hash[:heading]}"

-
-
-
+          cats[key].each do |subject|
+            po = {}
+
+            # Group predicates with their values
+            graph.query(subject: subject) do |statement|
+              # Sanity check this, as these are set to an empty string if not defined.
+              next if [RDF::RDFS.label, RDF::RDFS.comment].include?(statement.predicate) && statement.object.to_s.empty?
+              po[statement.predicate] ||= []
+              po[statement.predicate] << statement.object
+            end
+
+            next if po.empty?
+
+            po_list = []
+            unless (types = po.delete(RDF.type)).empty?
+              po_list << 'a ' + types.map {|o| writer.format_term(o)}.join(", ")
+            end
+
+            # Serialize other predicate/objects
+            po.each do |predicate, objects|
+              resource = predicate.qname ? predicate.pname : "<#{predicate}>"
+              po_list << resource + ' ' + objects.map {|o| writer.format_term(o)}.join(", ")
             end
-          end

-
+            # Output statements for this subject
+            subj = subject.qname ? subject.pname : "<#{subject}>"
+            output << "#{subj} " + po_list.join(";\n ") + "\n .\n"
+          end
         end
+
+        output.join("")
       rescue LoadError
         # No Turtle serialization unless gem loaded
       end

-
+      ##
+      # Generate JSON-LD representation, specific to vocabularies
+      #
+      # @param [RDF::Queryable] :graph Optional graph, otherwise uses statements from vocabulary.
+      # @param [Hash{#to_sym => String}] Prefixes to add to output
+      # @return [String]
+      def to_jsonld(graph: nil, prefixes: nil)
         require 'json/ld'

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        "owl:unionOf": {"@type": "@vocab", "@container": "@list"},
-        "rdfs_classes": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
-        "rdfs_properties": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
-        "rdfs_datatypes": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
-        "rdfs_instances": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"}
-      })
-      rdfs_classes, rdfs_properties, rdfs_datatypes, rdfs_instances = [], [], [], [], []
-
-      ontology = {
-        "@context" => rdfs_context,
-        "@id" => to_uri.to_s
-      }
+        context = {}
+        rdfs_context = ::JSON.parse %({
+          "dc:title": {"@container": "@language"},
+          "dc:description": {"@container": "@language"},
+          "dc:date": {"@type": "xsd:date"},
+          "rdfs:comment": {"@container": "@language"},
+          "rdfs:domain": {"@type": "@vocab"},
+          "rdfs:label": {"@container": "@language"},
+          "rdfs:range": {"@type": "@vocab"},
+          "rdfs:seeAlso": {"@type": "@id"},
+          "rdfs:subClassOf": {"@type": "@vocab"},
+          "rdfs:subPropertyOf": {"@type": "@vocab"},
+          "schema:domainIncludes": {"@type": "@vocab"},
+          "schema:rangeIncludes": {"@type": "@vocab"},
+          "owl:equivalentClass": {"@type": "@vocab"},
+          "owl:equivalentProperty": {"@type": "@vocab"},
+          "owl:oneOf": {"@container": "@list", "@type": "@vocab"},
+          "owl:imports": {"@type": "@id"},
+          "owl:versionInfo": {"@type": "@id"},
+          "owl:inverseOf": {"@type": "@vocab"},
+          "owl:unionOf": {"@type": "@vocab", "@container": "@list"},
+          "rdfs_classes": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
+          "rdfs_properties": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
+          "rdfs_datatypes": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
+          "rdfs_instances": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"}
+        })
+        rdfs_classes, rdfs_properties, rdfs_datatypes, rdfs_instances = [], [], [], [], []

-
-
+        ontology = {
+          "@context" => rdfs_context,
+          "@id" => to_uri.to_s
+        }

-
-
-        context[pfx.to_s] = uri.to_s unless pfx.to_s.empty?
-      end
+        # Find namespaces used in the vocabulary
+        graph = RDF::Graph.new {|g| each_statement {|s| g << s}} if graph.nil? || graph.empty?

-
-
+        prefixes = vocab_prefixes(graph).merge(prefixes || {})
+        prefixes.each do |pfx, uri|
+          context[pfx.to_s] = uri.to_s unless pfx.to_s.empty?
+        end

-
-
-        next unless Array(term.qname).length == 2
-        context[term.qname.last.to_s] = term.to_uri.to_s
-      end
+        # Determine the category for each subject in the vocabulary graph
+        cats = subject_categories(graph)

-
-
-
-
-
-          heading: "# #{__name__.split('::').last} Vocabulary definition\n",
-          bucket: ontology,
-        },
-        classes: {
-          heading: "# Class definitions\n",
-          bucket: rdfs_classes,
-          rev_prop: "rdfs_classes"
-        },
-        properties: {
-          heading: "# Property definitions\n",
-          bucket: rdfs_properties,
-          rev_prop: "rdfs_properties"
-        },
-        datatypes: {
-          heading: "# Datatype definitions\n",
-          bucket: rdfs_datatypes,
-          rev_prop: "rdfs_datatypes"
-        },
-        other: {
-          heading: "# Other definitions\n",
-          bucket: rdfs_instances,
-          rev_prop: "rdfs_instances"
-        }
-      }.each do |key, hash|
-        next unless cats[key]
-
-        cats[key].each do |subject|
-          node = {"@id" => subject.pname}
-          po = {}
-
-          # Group predicates with their values
-          graph.query(subject: subject) do |statement|
-            # Sanity check this, as these are set to an empty string if not defined.
-            next if [RDF::RDFS.label, RDF::RDFS.comment].include?(statement.predicate) && statement.object.to_s.empty?
-            po[statement.predicate] ||= []
-            po[statement.predicate] << statement.object
-          end
+        # Generate term definitions from graph subjects
+        cats.values.flatten.each do |term|
+          next unless Array(term.qname).length == 2
+          context[term.qname.last.to_s] = term.to_uri.to_s
+        end

-
+        # Parse the two contexts so we know what terms are in scope
+        jld_context = ::JSON::LD::Context.new.parse([context, rdfs_context])
+
+        {
+          ont: {
+            heading: "# #{__name__.split('::').last} Vocabulary definition\n",
+            bucket: ontology,
+          },
+          classes: {
+            heading: "# Class definitions\n",
+            bucket: rdfs_classes,
+            rev_prop: "rdfs_classes"
+          },
+          properties: {
+            heading: "# Property definitions\n",
+            bucket: rdfs_properties,
+            rev_prop: "rdfs_properties"
+          },
+          datatypes: {
+            heading: "# Datatype definitions\n",
+            bucket: rdfs_datatypes,
+            rev_prop: "rdfs_datatypes"
+          },
+          other: {
+            heading: "# Other definitions\n",
+            bucket: rdfs_instances,
+            rev_prop: "rdfs_instances"
+          }
+        }.each do |key, hash|
+          next unless cats[key]
+
+          cats[key].each do |subject|
+            node = {"@id" => subject.pname}
+            po = {}
+
+            # Group predicates with their values
+            graph.query(subject: subject) do |statement|
+              # Sanity check this, as these are set to an empty string if not defined.
+              next if [RDF::RDFS.label, RDF::RDFS.comment].include?(statement.predicate) && statement.object.to_s.empty?
+              po[statement.predicate] ||= []
+              po[statement.predicate] << statement.object
+            end

-
+            next if po.empty?

-
-          term = jld_context.compact_iri(predicate, vocab: true)
-          node[term] = if jld_context.container(term) == '@language'
-            lang_map = objects.inject({}) do |memo, o|
-              raise "Language-mapped term #{term} with non plain-literal #{o.inspect}" unless o.literal? && o.plain?
-              memo.merge(o.language.to_s => o.value)
-            end
-            # Don't use language map if there's only one entry with no language
-            lang_map = lang_map[""] if lang_map.keys == [""]
-            [lang_map]
-          else
-            objects.map do |o|
-              expanded_value = jld_context.expand_value(term, o)
-              jld_context.compact_value(term, expanded_value)
-            end
-          end
-        end
+            node['@type'] = po.delete(RDF.type).map {|t| jld_context.compact_iri(t, vocab: true)}

-
-
-
-
+            po.each do |predicate, objects|
+              term = jld_context.compact_iri(predicate, vocab: true)
+              node[term] = if jld_context.container(term) == '@language'
+                lang_map = objects.inject({}) do |memo, o|
+                  raise "Language-mapped term #{term} with non plain-literal #{o.inspect}" unless o.literal? && o.plain?
+                  memo.merge(o.language.to_s => o.value)
+                end
+                # Don't use language map if there's only one entry with no language
+                lang_map = lang_map[""] if lang_map.keys == [""]
+                [lang_map]
+              else
+                objects.map do |o|
+                  expanded_value = jld_context.expand_value(term, o)
+                  jld_context.compact_value(term, expanded_value)
                 end
               end
+            end

-
-
-
-
-          ontology[hash[:rev_prop]] ||= hash[:bucket]
-          hash[:bucket] << node
+            node.each do |property, values|
+              case values.length
+              when 0 then node.delete(property)
+              when 1 then node[property] = values.first
              end
            end
-        end

-
-
-
-
-
+            # Either set bucket from node, or append node to bucket
+            if hash[:bucket].is_a?(Hash)
+              hash[:bucket].merge!(node)
+            else
+              ontology[hash[:rev_prop]] ||= hash[:bucket]
+              hash[:bucket] << node
+            end
+          end
         end
+
+        # Serialize result
+        {
+          "@context" => context,
+          "@graph" => ontology
+        }.to_json(::JSON::LD::JSON_STATE)
       rescue LoadError
         # No JSON-LD serialization unless gem loaded
       end
@@ -266,8 +320,8 @@ module RDF
       ##
       # Generate HTML+RDFa representation, specific to vocabularies. This uses generated JSON-LD and a Haml template.
       #
-      # @param [RDF::Queryable]
-      # @param [Hash{#to_sym => String}]
+      # @param [RDF::Queryable] graph Optional graph, otherwise uses statements from vocabulary.
+      # @param [Hash{#to_sym => String}] prefixes to add to output
       # @param [String, Hash] jsonld
       #   If not provided, the `to_jsonld` method is used to generate it.
       # @param [String] template The path to a Haml or ERB template used to generate the output using the JSON-LD serialization
@@ -342,7 +396,7 @@ module RDF
         when /.erb$/
           require 'erubis'
           eruby = Erubis::FastEruby.new(File.read(template))
-
+          eruby.evaluate(binding: self, ont: expanded, context: json['@context'], prefixes: prefixes)
         else
           raise "Unknown template type #{template}. Should have '.erb' or '.haml' extension"
         end
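The extensions.rb changes above add class-level helpers (vocab_map, from_sym, limit_vocabs) and rework the to_ttl and to_jsonld serializers on RDF::Vocabulary. A minimal usage sketch, adapted from the @example comments in the diff; it assumes rdf-vocab 3.1.8 with the optional rdf-turtle gem installed (without it, to_ttl rescues LoadError and produces nothing, as shown above):

    require 'rdf/vocab'
    require 'rdf/turtle'

    # Look up a vocabulary class from its class_name symbol
    RDF::Vocabulary.from_sym(:FOAF)          # => RDF::Vocab::FOAF

    # Restrict term lookup to a small set of vocabularies (from the @example above)
    RDF::Vocabulary.limit_vocabs(:rdf, :rdfs, :schema)
    RDF::Vocabulary.find_term('http://schema.org/CreativeWork').pname
    # => 'schema:CreativeWork'

    # Reset the limit (empty argument list), then emit one vocabulary as Turtle
    RDF::Vocabulary.limit_vocabs()
    puts RDF::Vocab::FOAF.to_ttl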