rdf-vocab 3.1.1 → 3.1.7
- checksums.yaml +4 -4
- data/LICENSE +1 -1
- data/README.md +69 -45
- data/VERSION +1 -1
- data/lib/rdf/vocab.rb +55 -39
- data/lib/rdf/vocab/acl.rb +84 -1
- data/lib/rdf/vocab/as.rb +466 -1
- data/lib/rdf/vocab/bf2.rb +1317 -1
- data/lib/rdf/vocab/bibframe.rb +1 -2
- data/lib/rdf/vocab/bibo.rb +496 -1
- data/lib/rdf/vocab/cc.rb +107 -1
- data/lib/rdf/vocab/cert.rb +68 -1
- data/lib/rdf/vocab/cnt.rb +79 -1
- data/lib/rdf/vocab/crm.rb +1303 -1
- data/lib/rdf/vocab/datacite.rb +1 -1
- data/lib/rdf/vocab/dbo.rb +10463 -1
- data/lib/rdf/vocab/dc.rb +470 -301
- data/lib/rdf/vocab/dc11.rb +88 -57
- data/lib/rdf/vocab/dcat.rb +153 -5
- data/lib/rdf/vocab/dcmitype.rb +51 -25
- data/lib/rdf/vocab/disco.rb +228 -1
- data/lib/rdf/vocab/doap.rb +175 -1
- data/lib/rdf/vocab/dwc.rb +753 -1
- data/lib/rdf/vocab/earl.rb +324 -0
- data/lib/rdf/vocab/ebucore.rb +7335 -1974
- data/lib/rdf/vocab/edm.rb +150 -1
- data/lib/rdf/vocab/exif.rb +650 -1
- data/lib/rdf/vocab/extensions.rb +265 -212
- data/lib/rdf/vocab/fcrepo4.rb +377 -1
- data/lib/rdf/vocab/foaf.rb +305 -1
- data/lib/rdf/vocab/geo.rb +39 -1
- data/lib/rdf/vocab/geojson.rb +66 -1
- data/lib/rdf/vocab/geonames.rb +166 -1
- data/lib/rdf/vocab/gr.rb +684 -1
- data/lib/rdf/vocab/gs1.rb +1597 -1
- data/lib/rdf/vocab/ht.rb +164 -1
- data/lib/rdf/vocab/hydra.rb +238 -4
- data/lib/rdf/vocab/iana.rb +230 -1
- data/lib/rdf/vocab/ical.rb +462 -1
- data/lib/rdf/vocab/identifiers.rb +462 -1
- data/lib/rdf/vocab/iiif.rb +136 -1
- data/lib/rdf/vocab/jsonld.rb +147 -3
- data/lib/rdf/vocab/ldp.rb +130 -1
- data/lib/rdf/vocab/lrmi.rb +69 -1
- data/lib/rdf/vocab/ma.rb +381 -1
- data/lib/rdf/vocab/mads.rb +610 -22
- data/lib/rdf/vocab/{marc_relators.rb → marcrelators.rb} +809 -1
- data/lib/rdf/vocab/mo.rb +919 -1
- data/lib/rdf/vocab/mods.rb +516 -1
- data/lib/rdf/vocab/nfo.rb +1 -1
- data/lib/rdf/vocab/oa.rb +275 -1
- data/lib/rdf/vocab/og.rb +100 -1
- data/lib/rdf/vocab/ogc.rb +34 -1
- data/lib/rdf/vocab/ore.rb +56 -1
- data/lib/rdf/vocab/org.rb +189 -1
- data/lib/rdf/vocab/pcdm.rb +47 -1
- data/lib/rdf/vocab/pplan.rb +64 -1
- data/lib/rdf/vocab/premis.rb +1266 -779
- data/lib/rdf/vocab/{premis_event_type.rb → premiseventtype.rb} +155 -1
- data/lib/rdf/vocab/prov.rb +619 -1
- data/lib/rdf/vocab/ptr.rb +138 -1
- data/lib/rdf/vocab/rightsstatements.rb +5 -1
- data/lib/rdf/vocab/rsa.rb +30 -1
- data/lib/rdf/vocab/rss.rb +42 -1
- data/lib/rdf/vocab/schema.rb +12470 -804
- data/lib/rdf/vocab/sd.rb +372 -0
- data/lib/rdf/vocab/sh.rb +736 -1
- data/lib/rdf/vocab/sioc.rb +401 -1
- data/lib/rdf/vocab/{sioc_services.rb → siocservices.rb} +38 -1
- data/lib/rdf/vocab/{sioct.rb → sioctypes.rb} +158 -1
- data/lib/rdf/vocab/skos.rb +119 -1
- data/lib/rdf/vocab/skosxl.rb +30 -1
- data/lib/rdf/vocab/v.rb +291 -1
- data/lib/rdf/vocab/vcard.rb +508 -1
- data/lib/rdf/vocab/vmd.rb +291 -1
- data/lib/rdf/vocab/void.rb +121 -1
- data/lib/rdf/vocab/vs.rb +19 -1
- data/lib/rdf/vocab/wdrs.rb +90 -1
- data/lib/rdf/vocab/wot.rb +78 -1
- data/lib/rdf/vocab/xhtml.rb +2 -1
- data/lib/rdf/vocab/xhv.rb +338 -1
- data/lib/rdf/vocab/xkos.rb +150 -1
- data/spec/extensions_spec.rb +68 -0
- data/spec/vocab_spec.rb +10 -0
- metadata +16 -8
data/lib/rdf/vocab/extensions.rb
CHANGED
@@ -16,250 +16,303 @@ module RDF
       # @return [Enumerator]
       alias_method :_orig_each, :each
       def each(&block)
-
-
-
-
-
-
-
-
-
-          end
+        if self.equal?(Vocabulary)
+          # This is needed since all vocabulary classes are defined using
+          # Ruby's autoloading facility, meaning that `@@subclasses` will be
+          # empty until each subclass has been touched or require'd.
+          RDF::Vocab::VOCABS.each do |n, params|
+            clsname = params[:class_name].to_sym
+            RDF::Vocab.const_get(clsname) # Forces class to load
+          end unless @classes_loaded
+          @classes_loaded = true
         end
         _orig_each(&block)
       end

-
+      ##
+      # A hash of all vocabularies by prefix showing relevant URI and
+      # associated vocabulary Class Name
+      #
+      # @return [Hash{Symbol => Hash{Symbol => String}}]
+      #alias_method :_orig_vocab_map, :vocab_map
+      def vocab_map
+        @vocab_map ||= RDF::VOCABS.transform_values(&:freeze).merge(
+          RDF::Vocab::VOCABS.transform_values(&:freeze))
+      end
+
+      ##
+      # Return the vocabulary based on it's class_name symbol
+      #
+      # @param [Symbol] sym
+      # @return [RDF::Vocabulary]
+      alias_method :_orig_from_sym, :from_sym
+      def from_sym(sym)
+        RDF::Vocab.const_defined?(sym.to_sym) ?
+          RDF::Vocab.const_get(sym.to_sym) : _orig_from_sym(sym)
+      end
+
+      ##
+      # Limits iteration over vocabularies to just those selected
+      #
+      # @example limit to set of vocabularies by symbol
+      #   RDF::Vocabulary.limit_vocabs(:rdf, :rdfs, :schema)
+      #   RDF::Vocabulary.find_term('http://schema.org/CreativeWork').pname
+      #   # => 'schema:CreativeWork'
+      #
+      # @example limit to set of vocabularies by class name
+      #   RDF::Vocabulary.limit_vocabs(RDF::RDFV, RDF::RDFS, RDF::Vocab::SCHEMA)
+      #   RDF::Vocabulary.find_term('http://schema.org/CreativeWork').pname
+      #   # => 'schema:CreativeWork'
+      #
+      # @param [Array<symbol, RDF::Vocabulary>] vocabs
+      #   A list of vocabularies (symbols or classes) which may
+      #   be returned by {Vocabulary.each}. Also limits
+      #   vocabularies that will be inspeced for other methods.
+      #   Set to nil, or an empty array to reset.
+      # @return [Array<RDF::Vocabulary>]
+      def limit_vocabs(*vocabs)
+        @vocabs = if Array(vocabs).empty?
+          nil
+        else
+          @classes_loaded = true
+          vocabs.map do |vocab|
+            if vocab == :rdf || vocab == :rdfv
+              RDF::RDFV
+            elsif vocab.is_a?(Symbol) && RDF::Vocab::VOCABS.key?(vocab)
+              RDF::Vocab.const_get(RDF::Vocab::VOCABS[vocab][:class_name].to_sym)
+            else
+              vocab
+            end
+          end.compact
+        end
+      end
+
+      ##
+      # Generate Turtle representation, specific to vocabularies
+      #
+      # @param [RDF::Queryable] :graph Optional graph, otherwise uses statements from vocabulary.
+      # @param [Hash{#to_sym => String}] Prefixes to add to output
+      # @return [String]
+      def to_ttl(graph: nil, prefixes: nil)
         require 'rdf/turtle'
-
-      # Generate Turtle representation, specific to vocabularies
-      #
-      # @param [RDF::Queryable] :graph Optional graph, otherwise uses statements from vocabulary.
-      # @param [Hash{#to_sym => String}] Prefixes to add to output
-      # @return [String]
-      def to_ttl(graph: nil, prefixes: nil)
-        output = []
-
-        # Find namespaces used in the vocabulary
-        graph = RDF::Graph.new {|g| each_statement {|s| g << s}} if graph.nil? || graph.empty?
-
-        prefixes = vocab_prefixes(graph).merge(prefixes || {})
-        pfx_width = prefixes.keys.map(&:to_s).map(&:length).max
-        prefixes.each do |pfx, uri|
-          output << "@prefix %*s: <%s> .\n" % [pfx_width, pfx, uri]
-        end
+        output = []

-
-
-
-        writer = RDF::Turtle::Writer.new(StringIO.new, prefixes: prefixes)
-
-        {
-          ont: {
-            heading: "# #{__name__.split('::').last} Vocabulary definition\n"
-          },
-          classes: {
-            heading: "# Class definitions\n"
-          },
-          properties: {
-            heading: "# Property definitions\n"
-          },
-          datatypes: {
-            heading: "# Datatype definitions\n"
-          },
-          other: {
-            heading: "# Other definitions\n"
-          }
-        }.each do |key, hash|
-          next unless cats[key]
-
-          output << "\n\n#{hash[:heading]}"
-
-          cats[key].each do |subject|
-            po = {}
-
-            # Group predicates with their values
-            graph.query(subject: subject) do |statement|
-              # Sanity check this, as these are set to an empty string if not defined.
-              next if [RDF::RDFS.label, RDF::RDFS.comment].include?(statement.predicate) && statement.object.to_s.empty?
-              po[statement.predicate] ||= []
-              po[statement.predicate] << statement.object
-            end
+        # Find namespaces used in the vocabulary
+        graph = RDF::Graph.new {|g| each_statement {|s| g << s}} if graph.nil? || graph.empty?

-
+        prefixes = vocab_prefixes(graph).merge(prefixes || {})
+        pfx_width = prefixes.keys.map(&:to_s).map(&:length).max
+        prefixes.each do |pfx, uri|
+          output << "@prefix %*s: <%s> .\n" % [pfx_width, pfx, uri]
+        end

-
-
-
-
+        # Determine the category for each subject in the vocabulary graph
+        cats = subject_categories(graph)
+
+        writer = RDF::Turtle::Writer.new(StringIO.new, prefixes: prefixes)
+
+        {
+          ont: {
+            heading: "# #{__name__.split('::').last} Vocabulary definition\n"
+          },
+          classes: {
+            heading: "# Class definitions\n"
+          },
+          properties: {
+            heading: "# Property definitions\n"
+          },
+          datatypes: {
+            heading: "# Datatype definitions\n"
+          },
+          other: {
+            heading: "# Other definitions\n"
+          }
+        }.each do |key, hash|
+          next unless cats[key]

-
-            po.each do |predicate, objects|
-              resource = predicate.qname ? predicate.pname : "<#{predicate}>"
-              po_list << resource + ' ' + objects.map {|o| writer.format_term(o)}.join(", ")
-            end
+          output << "\n\n#{hash[:heading]}"

-
-
-
+          cats[key].each do |subject|
+            po = {}
+
+            # Group predicates with their values
+            graph.query(subject: subject) do |statement|
+              # Sanity check this, as these are set to an empty string if not defined.
+              next if [RDF::RDFS.label, RDF::RDFS.comment].include?(statement.predicate) && statement.object.to_s.empty?
+              po[statement.predicate] ||= []
+              po[statement.predicate] << statement.object
+            end
+
+            next if po.empty?
+
+            po_list = []
+            unless (types = po.delete(RDF.type)).empty?
+              po_list << 'a ' + types.map {|o| writer.format_term(o)}.join(", ")
+            end
+
+            # Serialize other predicate/objects
+            po.each do |predicate, objects|
+              resource = predicate.qname ? predicate.pname : "<#{predicate}>"
+              po_list << resource + ' ' + objects.map {|o| writer.format_term(o)}.join(", ")
             end
-          end

-
+            # Output statements for this subject
+            subj = subject.qname ? subject.pname : "<#{subject}>"
+            output << "#{subj} " + po_list.join(";\n ") + "\n .\n"
+          end
         end
+
+        output.join("")
       rescue LoadError
         # No Turtle serialization unless gem loaded
       end

-
+      ##
+      # Generate JSON-LD representation, specific to vocabularies
+      #
+      # @param [RDF::Queryable] :graph Optional graph, otherwise uses statements from vocabulary.
+      # @param [Hash{#to_sym => String}] Prefixes to add to output
+      # @return [String]
+      def to_jsonld(graph: nil, prefixes: nil)
         require 'json/ld'

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          "owl:unionOf": {"@type": "@vocab", "@container": "@list"},
-          "rdfs_classes": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
-          "rdfs_properties": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
-          "rdfs_datatypes": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
-          "rdfs_instances": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"}
-        })
-        rdfs_classes, rdfs_properties, rdfs_datatypes, rdfs_instances = [], [], [], [], []
-
-        ontology = {
-          "@context" => rdfs_context,
-          "@id" => to_uri.to_s
-        }
+        context = {}
+        rdfs_context = ::JSON.parse %({
+          "dc:title": {"@container": "@language"},
+          "dc:description": {"@container": "@language"},
+          "dc:date": {"@type": "xsd:date"},
+          "rdfs:comment": {"@container": "@language"},
+          "rdfs:domain": {"@type": "@vocab"},
+          "rdfs:label": {"@container": "@language"},
+          "rdfs:range": {"@type": "@vocab"},
+          "rdfs:seeAlso": {"@type": "@id"},
+          "rdfs:subClassOf": {"@type": "@vocab"},
+          "rdfs:subPropertyOf": {"@type": "@vocab"},
+          "schema:domainIncludes": {"@type": "@vocab"},
+          "schema:rangeIncludes": {"@type": "@vocab"},
+          "owl:equivalentClass": {"@type": "@vocab"},
+          "owl:equivalentProperty": {"@type": "@vocab"},
+          "owl:oneOf": {"@container": "@list", "@type": "@vocab"},
+          "owl:imports": {"@type": "@id"},
+          "owl:versionInfo": {"@type": "@id"},
+          "owl:inverseOf": {"@type": "@vocab"},
+          "owl:unionOf": {"@type": "@vocab", "@container": "@list"},
+          "rdfs_classes": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
+          "rdfs_properties": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
+          "rdfs_datatypes": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"},
+          "rdfs_instances": {"@reverse": "rdfs:isDefinedBy", "@type": "@id"}
+        })
+        rdfs_classes, rdfs_properties, rdfs_datatypes, rdfs_instances = [], [], [], [], []

-
-
+        ontology = {
+          "@context" => rdfs_context,
+          "@id" => to_uri.to_s
+        }

-
-
-          context[pfx.to_s] = uri.to_s unless pfx.to_s.empty?
-        end
+        # Find namespaces used in the vocabulary
+        graph = RDF::Graph.new {|g| each_statement {|s| g << s}} if graph.nil? || graph.empty?

-
-
+        prefixes = vocab_prefixes(graph).merge(prefixes || {})
+        prefixes.each do |pfx, uri|
+          context[pfx.to_s] = uri.to_s unless pfx.to_s.empty?
+        end

-
-
-          next unless Array(term.qname).length == 2
-          context[term.qname.last.to_s] = term.to_uri.to_s
-        end
+        # Determine the category for each subject in the vocabulary graph
+        cats = subject_categories(graph)

-
-
-
-
-
-            heading: "# #{__name__.split('::').last} Vocabulary definition\n",
-            bucket: ontology,
-          },
-          classes: {
-            heading: "# Class definitions\n",
-            bucket: rdfs_classes,
-            rev_prop: "rdfs_classes"
-          },
-          properties: {
-            heading: "# Property definitions\n",
-            bucket: rdfs_properties,
-            rev_prop: "rdfs_properties"
-          },
-          datatypes: {
-            heading: "# Datatype definitions\n",
-            bucket: rdfs_datatypes,
-            rev_prop: "rdfs_datatypes"
-          },
-          other: {
-            heading: "# Other definitions\n",
-            bucket: rdfs_instances,
-            rev_prop: "rdfs_instances"
-          }
-        }.each do |key, hash|
-          next unless cats[key]
-
-          cats[key].each do |subject|
-            node = {"@id" => subject.pname}
-            po = {}
-
-            # Group predicates with their values
-            graph.query(subject: subject) do |statement|
-              # Sanity check this, as these are set to an empty string if not defined.
-              next if [RDF::RDFS.label, RDF::RDFS.comment].include?(statement.predicate) && statement.object.to_s.empty?
-              po[statement.predicate] ||= []
-              po[statement.predicate] << statement.object
-            end
+        # Generate term definitions from graph subjects
+        cats.values.flatten.each do |term|
+          next unless Array(term.qname).length == 2
+          context[term.qname.last.to_s] = term.to_uri.to_s
+        end

-
+        # Parse the two contexts so we know what terms are in scope
+        jld_context = ::JSON::LD::Context.new.parse([context, rdfs_context])
+
+        {
+          ont: {
+            heading: "# #{__name__.split('::').last} Vocabulary definition\n",
+            bucket: ontology,
+          },
+          classes: {
+            heading: "# Class definitions\n",
+            bucket: rdfs_classes,
+            rev_prop: "rdfs_classes"
+          },
+          properties: {
+            heading: "# Property definitions\n",
+            bucket: rdfs_properties,
+            rev_prop: "rdfs_properties"
+          },
+          datatypes: {
+            heading: "# Datatype definitions\n",
+            bucket: rdfs_datatypes,
+            rev_prop: "rdfs_datatypes"
+          },
+          other: {
+            heading: "# Other definitions\n",
+            bucket: rdfs_instances,
+            rev_prop: "rdfs_instances"
+          }
+        }.each do |key, hash|
+          next unless cats[key]
+
+          cats[key].each do |subject|
+            node = {"@id" => subject.pname}
+            po = {}
+
+            # Group predicates with their values
+            graph.query(subject: subject) do |statement|
+              # Sanity check this, as these are set to an empty string if not defined.
+              next if [RDF::RDFS.label, RDF::RDFS.comment].include?(statement.predicate) && statement.object.to_s.empty?
+              po[statement.predicate] ||= []
+              po[statement.predicate] << statement.object
+            end

-
+            next if po.empty?

-
-              term = jld_context.compact_iri(predicate, vocab: true)
-              node[term] = if jld_context.container(term) == '@language'
-                lang_map = objects.inject({}) do |memo, o|
-                  raise "Language-mapped term #{term} with non plain-literal #{o.inspect}" unless o.literal? && o.plain?
-                  memo.merge(o.language.to_s => o.value)
-                end
-                # Don't use language map if there's only one entry with no language
-                lang_map = lang_map[""] if lang_map.keys == [""]
-                [lang_map]
-              else
-                objects.map do |o|
-                  expanded_value = jld_context.expand_value(term, o)
-                  jld_context.compact_value(term, expanded_value)
-                end
-              end
-            end
+            node['@type'] = po.delete(RDF.type).map {|t| jld_context.compact_iri(t, vocab: true)}

-
-
-
-
+            po.each do |predicate, objects|
+              term = jld_context.compact_iri(predicate, vocab: true)
+              node[term] = if jld_context.container(term) == '@language'
+                lang_map = objects.inject({}) do |memo, o|
+                  raise "Language-mapped term #{term} with non plain-literal #{o.inspect}" unless o.literal? && o.plain?
+                  memo.merge(o.language.to_s => o.value)
+                end
+                # Don't use language map if there's only one entry with no language
+                lang_map = lang_map[""] if lang_map.keys == [""]
+                [lang_map]
+              else
+                objects.map do |o|
+                  expanded_value = jld_context.expand_value(term, o)
+                  jld_context.compact_value(term, expanded_value)
                 end
               end
+            end

-
-
-
-
-            ontology[hash[:rev_prop]] ||= hash[:bucket]
-            hash[:bucket] << node
+            node.each do |property, values|
+              case values.length
+              when 0 then node.delete(property)
+              when 1 then node[property] = values.first
               end
             end
-          end

-
-
-
-
-
+            # Either set bucket from node, or append node to bucket
+            if hash[:bucket].is_a?(Hash)
+              hash[:bucket].merge!(node)
+            else
+              ontology[hash[:rev_prop]] ||= hash[:bucket]
+              hash[:bucket] << node
+            end
+          end
         end
+
+        # Serialize result
+        {
+          "@context" => context,
+          "@graph" => ontology
+        }.to_json(::JSON::LD::JSON_STATE)
      rescue LoadError
        # No JSON-LD serialization unless gem loaded
      end
@@ -267,8 +320,8 @@ module RDF
       ##
       # Generate HTML+RDFa representation, specific to vocabularies. This uses generated JSON-LD and a Haml template.
       #
-      # @param [RDF::Queryable]
-      # @param [Hash{#to_sym => String}]
+      # @param [RDF::Queryable] graph Optional graph, otherwise uses statements from vocabulary.
+      # @param [Hash{#to_sym => String}] prefixes to add to output
       # @param [String, Hash] jsonld
       #   If not provided, the `to_jsonld` method is used to generate it.
       # @param [String] template The path to a Haml or ERB template used to generate the output using the JSON-LD serialization
@@ -343,7 +396,7 @@ module RDF
         when /.erb$/
           require 'erubis'
           eruby = Erubis::FastEruby.new(File.read(template))
-
+          eruby.evaluate(binding: self, ont: expanded, context: json['@context'], prefixes: prefixes)
         else
           raise "Unknown template type #{template}. Should have '.erb' or '.haml' extension"
         end
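Taken together, these extensions add vocabulary lookup helpers (vocab_map, from_sym, limit_vocabs) and serialization helpers (to_ttl, to_jsonld, to_html) to RDF::Vocabulary. A minimal usage sketch, based only on the method signatures and @example blocks in the diff above; the choice of FOAF here is illustrative, and to_ttl / to_jsonld simply return nil when the rdf-turtle or json-ld gem cannot be loaded:

require 'rdf/vocab'

# Resolve a vocabulary class from its class_name symbol (new from_sym override).
foaf = RDF::Vocabulary.from_sym(:FOAF)   # => RDF::Vocab::FOAF

# Serialize a vocabulary; each method rescues LoadError and returns nil
# when the corresponding serializer gem is not installed.
puts foaf.to_ttl
puts foaf.to_jsonld

# Restrict Vocabulary.each and find_term to a chosen set of vocabularies,
# as documented in the @example blocks for limit_vocabs.
RDF::Vocabulary.limit_vocabs(:rdf, :rdfs, :schema)
RDF::Vocabulary.find_term('http://schema.org/CreativeWork').pname
# => 'schema:CreativeWork'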