rdf-agraph 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/AUTHORS +1 -0
- data/README.md +305 -0
- data/UNLICENSE +24 -0
- data/VERSION +1 -0
- data/lib/rdf/allegro_graph/abstract_repository.rb +484 -0
- data/lib/rdf/allegro_graph/functors/sna_functors.rb +148 -0
- data/lib/rdf/allegro_graph/functors.rb +4 -0
- data/lib/rdf/allegro_graph/query/functor_expression.rb +56 -0
- data/lib/rdf/allegro_graph/query/prolog_literal.rb +29 -0
- data/lib/rdf/allegro_graph/query.rb +124 -0
- data/lib/rdf/allegro_graph/repository.rb +81 -0
- data/lib/rdf/allegro_graph/server.rb +93 -0
- data/lib/rdf/allegro_graph/session.rb +78 -0
- data/lib/rdf/allegro_graph/sna_generator.rb +37 -0
- data/lib/rdf/allegro_graph.rb +14 -0
- data/lib/rdf-agraph.rb +2 -0
- metadata +227 -0
@@ -0,0 +1,484 @@
+module RDF::AllegroGraph
+  # Features shared by regular AllegroGraph repositories and by persistent
+  # backend sessions.
+  #
+  # Note that this class does not interoperate well with the Unix `fork`
+  # command if you're using blank nodes. See README.md for details.
+  class AbstractRepository < RDF::Repository
+    # This code is based on
+    # http://blog.datagraph.org/2010/04/rdf-repository-howto
+    #
+    # For comparison purposes, here's a list of other RDF::Repository
+    # implementations:
+    #
+    # https://github.com/fumi/rdf-4store/blob/master/lib/rdf/four_store/repository.rb
+    # https://github.com/bendiken/rdf-bert/blob/master/lib/rdf/bert/client.rb
+    # https://github.com/bendiken/rdf-cassandra/blob/master/lib/rdf/cassandra/repository.rb (more complete than many)
+    # https://github.com/bhuga/rdf-do/blob/master/lib/rdf/do.rb
+    # https://github.com/pius/rdf-mongo/blob/master/lib/rdf/mongo.rb
+    # https://github.com/njh/rdf-redstore/blob/master/lib/rdf/redstore/repository.rb
+    # https://github.com/bendiken/rdf-sesame/blob/master/lib/rdf/sesame/repository.rb
+    # https://github.com/bhuga/rdf-talis/blob/master/lib/rdf/talis/repository.rb
+    # https://github.com/bendiken/sparql-client/blob/master/lib/sparql/client/repository.rb
+    #
+    # We actually stack up pretty well against this list.
+
+
+    #--------------------------------------------------------------------
+    # @group RDF::Repository methods
+
+    # Create a new AllegroGraph repository adapter.
+    #
+    # @param [AllegroGraph::Resource] resource
+    #   The underlying 'agraph'-based implementation to wrap.
+    # @private
+    def initialize(resource)
+      @repo = resource
+      @blank_nodes = []
+      @blank_nodes_to_generate = 8
+      @blank_nodes_local_to_server = {}
+      @blank_nodes_server_to_local = {}
+    end
+
+    # Returns true if `feature` is supported.
+    #
+    # @param [Symbol] feature
+    # @return [Boolean]
+    def supports?(feature)
+      case feature.to_sym
+      when :context then true
+      else super
+      end
+    end
+
+
+    #--------------------------------------------------------------------
+    # @group RDF::Transaction support
+    #
+    # TODO: Implement before_execute and after_execute. Note that
+    # RDF::Transaction can only operate on a single graph at a time. The
+    # RDF.rb transaction API is still pretty weak, and it is expected to
+    # be refined over the course of the RDF.rb 0.3.x series.
+    #
+    # Or should we implement the methods described here?
+    # http://blog.datagraph.org/2010/12/rdf-for-ruby It's not clear how
+    # we should tackle this.
+
+
+    #--------------------------------------------------------------------
+    # @group RDF::Enumerable methods
+
+    # Iterate over all statements in the repository. This is used by
+    # RDF::Enumerable as a fallback for handling any unimplemented
+    # methods.
+    #
+    # @yield [statement]
+    # @yieldparam [RDF::Statement] statement
+    # @yieldreturn [void]
+    # @return [void]
+    def each(&block)
+      query_pattern(RDF::Query::Pattern.new, &block)
+    end
+
+    # Does the repository contain the specified statement?
+    #
+    # @param [RDF::Statement] statement
+    # @return [Boolean]
+    def has_statement?(statement)
+      found = @repo.statements.find(statement_to_dict(statement))
+      !found.empty?
+    end
+
+    # TODO: There are lots of methods with names like 'predicates',
+    # 'each_predicate', etc., that we could usefully override if anybody
+    # needs to be able to list all the predicates in the repository
+    # without scanning every record. But we'll wait until somebody needs
+    # those before overriding the default implementations.
+
+
+    #--------------------------------------------------------------------
+    # @group RDF::Countable methods
+    #
+    # TODO: I'd love to override these methods for the sake of
+    # performance, but RDF.rb does not want duplicate statements to be
+    # counted twice, and AllegroGraph does count them.
+
+    # Is this repository empty?
+    #def empty?
+    #  count == 0
+    #end
+
+    # How many statements are in this repository?
+    #def count
+    #  @repo.request_http(:get, path(:statements),
+    #                     :headers => { 'Accept' => 'text/integer' },
+    #                     :expected_status_code => 200).chomp.to_i
+    #end
+
+
+    #--------------------------------------------------------------------
+    # @group RDF::Queryable methods
+
+    # Find all RDF statements matching a pattern.
+    #
+    # @overload query_pattern(pattern) {|statement| ... }
+    #   @yield statement
+    #   @yieldparam [RDF::Statement] statement
+    #   @yieldreturn [void]
+    #   @return [void]
+    #
+    # @overload query_pattern(pattern)
+    #   @return [Enumerator]
+    #
+    # @param [RDF::Query::Pattern] pattern A simple pattern to match.
+    # @return [void]
+    def query_pattern(pattern)
+      if block_given?
+        seen = {}
+        dict = statement_to_dict(pattern)
+        dict.delete(:context) if dict[:context] == 'null'
+        @repo.statements.find(dict).each do |statement|
+          unless seen.has_key?(statement)
+            seen[statement] = true
+            s,p,o,c = statement.map {|v| unserialize(v) }
+            if c.nil?
+              yield RDF::Statement.new(s,p,o)
+            else
+              yield RDF::Statement.new(s,p,o, :context => c)
+            end
+          end
+        end
+      else
+        enum_for(:query_pattern, pattern)
+      end
+    end
+    protected :query_pattern
+
+    # TODO: Override first, first_subject, first_predicate, first_object,
+    # first_literal for performance.
+
+    # Run an RDF::Query on the server.
+    #
+    # @param [RDF::Query] query The query to execute.
+    # @yield solution
+    # @yieldparam [RDF::Query::Solution] solution
+    # @yieldreturn [void]
+    #
+    # @see RDF::Queryable#query
+    # @see RDF::Query#execute
+    def query_execute(query, &block)
+      if query.respond_to?(:requires_prolog?) && query.requires_prolog?
+        prolog_query(query.to_prolog(self), &block)
+      else
+        sparql_query(query_to_sparql(query), &block)
+      end
+    end
+    protected :query_execute
+
+
+    #--------------------------------------------------------------------
+    # @group AllegroGraph-specific query methods
+
+    # Run a raw SPARQL query.
+    #
+    # @overload sparql_query(query) {|solution| ... }
+    #   @yield solution
+    #   @yieldparam [RDF::Query::Solution] solution
+    #   @yieldreturn [void]
+    #   @return [void]
+    #
+    # @overload sparql_query(pattern)
+    #   @return [Enumerator<RDF::Query::Solution>]
+    #
+    # @param [String] query The query to run.
+    # @return [void]
+    # @note This function returns a single-use Enumerator! If you want
+    #   to treat the results as an array, call `to_a` on it, or you will
+    #   re-run the query against the server repeatedly. This curious
+    #   decision is made for consistency with RDF.rb.
+    def sparql_query(query, &block)
+      raw_query(:sparql, query, &block)
+    end
+
+    # Run a raw Prolog query.
+    #
+    # @overload prolog_query(query) {|solution| ... }
+    #   @yield solution
+    #   @yieldparam [RDF::Query::Solution] solution
+    #   @yieldreturn [void]
+    #   @return [void]
+    #
+    # @overload prolog_query(pattern)
+    #   @return [Enumerator<RDF::Query::Solution>]
+    #
+    # @param [String] query The query to run.
+    # @return [void]
+    # @note This function returns a single-use Enumerator! If you want
+    #   to treat the results as an array, call `to_a` on it, or you will
+    #   re-run the query against the server repeatedly. This curious
+    #   decision is made for consistency with RDF.rb.
+    def prolog_query(query, &block)
+      raw_query(:prolog, query, &block)
+    end
+
+    # Run a raw query in the specified language.
+    def raw_query(language, query, &block)
+      @repo.query.language = language
+      results = json_to_query_solutions(@repo.query.perform(query))
+      if block_given?
+        results.each {|s| yield s }
+      else
+        enum_for(:raw_query, language, query)
+      end
+    end
+    protected :raw_query
+
+    # Construct an AllegroGraph-specific query.
+    #
+    # @yield query
+    # @yieldparam [Query] The query to build. Use the Query API to add
+    #   patterns and functors.
+    # @yieldreturn [void]
+    # @return [Query]
+    #
+    # @see Query
+    # @see RDF::Query
+    def build_query(&block)
+      Query.new(self, &block)
+    end
+
+
+    #--------------------------------------------------------------------
+    # @group RDF::Mutable methods
+
+    # Insert a single statement into the repository.
+    #
+    # @param [RDF::Statement] statement
+    # @return [void]
+    def insert_statement(statement)
+      insert_statements([statement])
+    end
+    protected :insert_statement
+
+    # Insert multiple statements at once.
+    #
+    # @param [Array<RDF::Statement>] statements
+    # @return [void]
+    def insert_statements(statements)
+      # FIXME: RDF.rb expects duplicate statements to be ignored if
+      # inserted into a mutable store, but AllegroGraph allows duplicate
+      # statements. We work around this in our other methods, but we
+      # need to either use transactions, find appropriate AllegroGraph
+      # documentation, or talk to the RDF.rb folks.
+      #
+      # A discussion of duplicate RDF statements:
+      # http://lists.w3.org/Archives/Public/www-rdf-interest/2004Oct/0091.html
+      #
+      # Note that specifying deleteDuplicates on repository creation doesn't
+      # seem to affect this.
+      json = statements_to_json(statements)
+      @repo.request_json(:post, path(:statements), :body => json,
+                         :expected_status_code => 204)
+    end
+    protected :insert_statements
+
+    # Delete a single statement from the repository.
+    #
+    # @param [RDF::Statement] statement
+    # @return [void]
+    def delete_statement(statement)
+      # TODO: Do we need to handle invalid statements here by turning them
+      # into queries and deleting all matching statements?
+      delete_statements([statement])
+    end
+    protected :delete_statement
+
+    # Delete multiple statements from the repository.
+    #
+    # @param [Array<RDF::Statement>] statements
+    # @return [void]
+    def delete_statements(statements)
+      json = statements_to_json(statements)
+      @repo.request_json(:post, path('statements/delete'),
+                         :body => json, :expected_status_code => 204)
+    end
+    protected :delete_statements
+
+    # TODO: Override delete to implement fast wildcard deletion without
+    # having to first query for the matching records.
+
+    # Clear all statements from the repository.
+    #
+    # @return [void]
+    def clear
+      @repo.statements.delete
+    end
+
+
+    #--------------------------------------------------------------------
+    # @group Serialization methods
+
+    # Serialize an RDF::Value for transmission to the server. This
+    # is exported for low-level libraries that need to access our
+    # serialization and deserialization machinery, which has special-case
+    # support for RDF nodes.
+    #
+    # @param [RDF::Value,RDF::Query::Variable] value
+    # @return [String]
+    # @see #serialize_prolog
+    def serialize(value)
+      case value
+      when RDF::Query::Variable then value.to_s
+      else RDF::NTriples::Writer.serialize(map_to_server(value))
+      end
+    end
+
+    # Serialize an RDF::Value for use in a Prolog expression that will
+    # be transmitted to the server.
+    #
+    # @param [RDF::Value,RDF::Query::Variable] value
+    # @return [String]
+    # @see #serialize
+    def serialize_prolog(value)
+      case value
+      when RDF::AllegroGraph::Query::PrologLiteral then value.to_s
+      when RDF::Query::Variable then value.to_s
+      else "!#{serialize(value)}"
+      end
+    end
+
+
+    protected
+
+    # Build a repository-relative path.
+    def path(relative_path)
+      "#{@repo.path}/#{relative_path}"
+    end
+
+    # Deserialize an RDF::Value received from the server, or an array of such
+    # values when working with Prolog queries.
+    #
+    # @param [String,Array] str_or_array
+    #   A string, or a possibly-nested array of strings.
+    # @return [RDF::Value]
+    # @see #serialize
+    def unserialize(str_or_array)
+      case str_or_array
+      when Array then str_or_array.map {|v| unserialize(v) }
+      else map_from_server(RDF::NTriples::Reader.unserialize(str_or_array))
+      end
+    end
+
+    # Convert a list of statements to a JSON-compatible array.
+    def statements_to_json(statements)
+      statements.map do |s|
+        tuple = [s.subject, s.predicate, s.object]
+        tuple << s.context if s.context
+        tuple.map {|v| serialize(v) }
+      end
+    end
+
+    # Translate an RDF::Statement into a dictionary that we can pass
+    # directly to the 'agraph' gem.
+    def statement_to_dict(statement)
+      {
+        :subject => serialize(statement.subject),
+        :predicate => serialize(statement.predicate),
+        :object => serialize(statement.object),
+        # We have to pass the null context explicitly if we only want
+        # to operate on a single statement. Otherwise, we will operate
+        # on all matching s,p,o triples regardless of context.
+        :context => serialize(statement.context) || 'null'
+      }
+    end
+
+    # Convert a query to SPARQL.
+    def query_to_sparql(query)
+      variables = []
+      patterns = []
+      query.patterns.each do |p|
+        p.variables.each {|_,v| variables << v unless variables.include?(v) }
+        triple = [p.subject, p.predicate, p.object]
+        str = triple.map {|v| serialize(v) }.join(" ")
+        # TODO: Wrap in graph block for context!
+        if p.optional?
+          str = "OPTIONAL { #{str} }"
+        end
+        patterns << "#{str} ."
+      end
+      "SELECT #{variables.join(" ")}\nWHERE {\n #{patterns.join("\n ")} }"
+    end
+
+    # Convert a JSON query solution to a list of RDF::Query::Solution
+    # objects.
+    def json_to_query_solutions(json)
+      names = json['names'].map {|n| n.to_sym }
+      json['values'].map do |match|
+        hash = {}
+        names.each_with_index do |name, i|
+          # TODO: I'd like to include nil values, too, but
+          # RDF::Query#execute does not yet do so, so we'll filter them for
+          # now.
+          hash[name] = unserialize(match[i]) unless match[i].nil?
+        end
+        RDF::Query::Solution.new(hash)
+      end
+    end
+
+    # Return true if this is a blank RDF node.
+    def blank_node?(value)
+      !value.nil? && value.anonymous?
+    end
+
+    # Ask AllegroGraph to generate a series of blank node IDs.
+    def generate_blank_nodes(amount)
+      response = @repo.request_http(:post, path(:blankNodes),
+                                    :parameters => { :amount => amount },
+                                    :expected_status_code => 200)
+      response.chomp.split("\n").map {|i| i.gsub(/^_:/, '') }
+    end
+
+    # Allocate an "official" AllegroGraph blank node, which should
+    # maintain its identity across requests.
+    def allocate_blank_node
+      if @blank_nodes.empty?
+        @blank_nodes = generate_blank_nodes(@blank_nodes_to_generate).reverse
+        @blank_nodes_to_generate *= 2
+      end
+      @blank_nodes.pop
+    end
+
+    # Create a mapping between a local blank node ID and a server-side
+    # blank node ID.
+    def map_blank_node(local_id, server_id)
+      #puts "Mapping #{local_id} -> #{server_id}"
+      @blank_nodes_local_to_server[local_id] = server_id
+      @blank_nodes_server_to_local[server_id] = local_id
+    end
+
+    # Translate this value to a server-specific representation, taking
+    # care to handle blank nodes correctly.
+    def map_to_server(value)
+      return value unless blank_node?(value)
+      unless @blank_nodes_local_to_server.has_key?(value.id)
+        new_id = allocate_blank_node
+        map_blank_node(value.id, new_id)
+      end
+      RDF::Node.new(@blank_nodes_local_to_server[value.id])
+    end
+
+    # Translate this value to a client-specific representation, taking
+    # care to handle blank nodes correctly.
+    def map_from_server(value)
+      return value unless blank_node?(value)
+      if @blank_nodes_server_to_local.has_key?(value.id)
+        RDF::Node.new(@blank_nodes_server_to_local[value.id])
+      else
+        # We didn't generate this node ID, so we want to pass it back to
+        # the server unchanged.
+        map_blank_node(value.id, value.id)
+        value
+      end
+    end
+  end
+end
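
For orientation, here is a minimal usage sketch of the public query methods defined in abstract_repository.rb above. The connection URL and the `RDF::AllegroGraph::Repository.new(url)` call are assumptions (repository.rb and the bundled README are not reproduced in this excerpt); `sparql_query` and its single-use Enumerator behaviour are taken directly from the hunk above.

    require 'rdf-agraph'

    # Hypothetical connection details; adjust user, password, host, and repository name.
    url  = "http://user:password@localhost:10035/repositories/example"
    repo = RDF::AllegroGraph::Repository.new(url)

    # Raw SPARQL: the block receives RDF::Query::Solution objects.
    repo.sparql_query("SELECT ?s ?o WHERE { ?s ?p ?o } LIMIT 5") do |solution|
      puts "#{solution[:s]} -> #{solution[:o]}"
    end

    # Without a block, a single-use Enumerator is returned; call to_a if you
    # need to walk the results more than once (see the @note above).
    subjects = repo.sparql_query("SELECT ?s WHERE { ?s ?p ?o }").to_a
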
@@ -0,0 +1,148 @@
+module RDF::AllegroGraph::Functors
+
+  # This module contains AllegroGraph functor definitions that may be
+  # called when building a query. Note that these functors merely add a
+  # functor expression to a query. The actual functor will be called on
+  # the server.
+  #
+  # @see Session#generator
+  module SnaFunctors
+    # @private
+    PrologLiteral = RDF::AllegroGraph::Query::PrologLiteral
+
+    # @group Paths Through the Graph
+
+    # Search for paths between two nodes following the edges specified by
+    # generator, and using a breadth-first search strategy.
+    #
+    # @param [Symbol,RDF::Resource] from
+    #   Input: The start node in the path.
+    # @param [Symbol,RDF::Resource] to
+    #   Input: The end node in the path.
+    # @param [PrologLiteral] generator
+    #   Input: The generator to use when finding links to traverse.
+    # @param [Symbol] path
+    #   Output: A list of nodes in the path.
+    # @param [Hash] options
+    # @option options [Integer] :max_depth
+    #   Input: The maximum search depth.
+    # @return [void]
+    def breadth_first_search_paths(from, to, generator, path, options={})
+      search_paths('breadth-first-search-paths', from, to, generator, path,
+                   options)
+    end
+
+    # Search for paths between two nodes following the edges specified by
+    # generator, and using a depth-first search strategy.
+    #
+    # @param [Symbol,RDF::Resource] from
+    #   Input: The start node in the path.
+    # @param [Symbol,RDF::Resource] to
+    #   Input: The end node in the path.
+    # @param [PrologLiteral] generator
+    #   Input: The generator to use when finding links to traverse.
+    # @param [Symbol] path
+    #   Output: A list of nodes in the path.
+    # @param [Hash] options
+    # @option options [Integer] :max_depth
+    #   Input: The maximum search depth.
+    # @return [void]
+    def depth_first_search_paths(from, to, generator, path, options={})
+      search_paths('depth-first-search-paths', from, to, generator, path,
+                   options)
+    end
+
+    # Search for paths between two nodes following the edges specified by
+    # generator, and using a bidirectional search strategy.
+    #
+    # @param [Symbol,RDF::Resource] from
+    #   Input: The start node in the path.
+    # @param [Symbol,RDF::Resource] to
+    #   Input: The end node in the path.
+    # @param [PrologLiteral] generator
+    #   Input: The generator to use when finding links to traverse.
+    # @param [Symbol] path
+    #   Output: A list of nodes in the path.
+    # @param [Hash] options
+    # @option options [Integer] :max_depth
+    #   Input: The maximum search depth.
+    # @return [void]
+    def bidirectional_search_paths(from, to, generator, path, options={})
+      search_paths('bidirectional-search-paths', from, to, generator, path,
+                   options)
+    end
+
+    # @private
+    def search_paths(functor_name, from, to, generator, path, options={})
+      if options.has_key?(:max_depth)
+        functor(functor_name, from, to, generator,
+                PrologLiteral.new(options[:max_depth]), path)
+      else
+        functor(functor_name, from, to, generator, path)
+      end
+    end
+
+
+    # @group Nearby Nodes
+
+    # Count the neighbors of the actor.
+    #
+    # @param [Symbol,RDF::Resource] actor
+    #   Input: The node whose neighbors we want to find.
+    # @param [PrologLiteral] generator
+    #   Input: The generator to use when finding links to traverse.
+    # @param [Symbol] count
+    #   Output: The number of neighbors of the actor.
+    # @return [void]
+    def neighbor_count(actor, generator, count)
+      functor('nodal-degree', actor, generator, count)
+    end
+    alias_method :nodal_degree, :neighbor_count
+
+    # Find all neighbors of the actor.
+    #
+    # @param [Symbol,RDF::Resource] actor
+    #   Input: The node whose neighbors we want to find.
+    # @param [PrologLiteral] generator
+    #   Input: The generator to use when finding links to traverse.
+    # @param [Symbol] neighbor
+    #   Output: A neighbor of the actor.
+    # @return [void]
+    def neighbors(actor, generator, neighbor)
+      functor('nodal-neighbors', actor, generator, neighbor)
+    end
+    alias_method :nodal_neighbors, :neighbors
+
+    # Generate an actor's ego group.
+    #
+    # @param [Symbol,RDF::Resource] actor
+    #   Input: The resource at the center of the graph.
+    # @param [Integer] depth
+    #   Input: The maximum number of links to traverse.
+    # @param [PrologLiteral] generator
+    #   Input: The generator to use when finding links to traverse.
+    # @param [Array<RDF::Resource>] group
+    #   Output: Either a variable or resource.
+    # @return [void]
+    def ego_group(actor, depth, generator, group)
+      functor('ego-group', actor, PrologLiteral.new(depth),
+              generator, group)
+    end
+
+    # Generate all members of an actor's ego group.
+    #
+    # @param [Symbol,RDF::Resource] actor
+    #   Input: The resource at the center of the graph.
+    # @param [Integer] depth
+    #   Input: The maximum number of links to traverse.
+    # @param [PrologLiteral] generator
+    #   Input: The generator to use when finding links to traverse.
+    # @param [Symbol,RDF::Resource] member
+    #   Input/Output: Either a variable or resource.
+    # @return [void]
+    def ego_group_member(actor, depth, generator, member)
+      functor('ego-group-member', actor, PrologLiteral.new(depth),
+              generator, member)
+    end
+  end
+end
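
A hedged sketch of how these SNA functors are typically combined with `build_query` from abstract_repository.rb. The session and generator setup (`Repository#session`, `Session#generator`) lives in session.rb and sna_generator.rb, which are not reproduced in this excerpt, so those calls and their argument forms are assumptions based on the `@see Session#generator` reference above; `ego_group_member` is the functor defined in this file.

    # Assumed setup: a dedicated session plus a generator that follows foaf:knows links.
    repo.session do |session|
      knows = session.generator(:object_of => RDF::FOAF.knows)

      query = session.build_query do |q|
        # Everyone within two foaf:knows hops of Alice.
        q.ego_group_member RDF::URI("http://example.com/people/alice"), 2, knows, :member
      end

      # The standard RDF.rb entry point (#query) dispatches to the protected
      # query_execute hook shown in abstract_repository.rb.
      session.query(query) {|solution| puts solution[:member] }
    end
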
@@ -0,0 +1,56 @@
+class RDF::AllegroGraph::Query
+
+  # A functor expression in a Prolog query (other than an ordinary
+  # pattern).
+  #
+  # @see RDF::Query::Pattern
+  # @see RDF::AllegroGraph::Functors
+  class FunctorExpression
+    # The name of this functor.
+    attr_reader :name
+
+    # The arguments passed to this functor.
+    attr_reader :arguments
+
+    # Construct a new functor.
+    #
+    # @param [String] name
+    # @param [Array<Symbol,RDF::Value,value>] arguments
+    #   The arguments to the functor, which may be either variables,
+    #   RDF::Value objects, or Ruby values that we can convert to literals.
+    def initialize(name, *arguments)
+      @name = name
+      @arguments = arguments.map do |arg|
+        case arg
+        when Symbol then RDF::Query::Variable.new(arg)
+        when PrologLiteral, RDF::Value then arg
+        else RDF::Literal.new(arg)
+        end
+      end
+    end
+
+    # Return a hash table of all variables used in this functor. This
+    # is intended to be duck-type compatible with the same method in
+    # RDF::Query::Pattern.
+    #
+    # @return [Hash<Symbol,RDF::Query::Variable>]
+    # @see RDF::Query::Pattern#variables
+    def variables
+      result = {}
+      @arguments.each do |arg|
+        result.merge!(arg.variables) if arg.is_a?(RDF::Query::Variable)
+      end
+      result
+    end
+
+    # Convert this functor to a Prolog Lisp expression.
+    #
+    # @param [RDF::AllegroGraph::Repository] repository
+    # @return [String]
+    # @private
+    def to_prolog(repository)
+      args = arguments.map {|a| repository.serialize_prolog(a) }
+      "(#{name} #{args.join(" ")})"
+    end
+  end
+end
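
A small illustration of what FunctorExpression does with its arguments, based only on the code above: symbols become query variables, PrologLiteral and RDF::Value arguments pass through unchanged, and `#to_prolog` renders the whole call as a Lisp-style term via `AbstractRepository#serialize_prolog`. The `repo` object and the example IRI are assumptions carried over from the earlier sketches.

    functor = RDF::AllegroGraph::Query::FunctorExpression.new(
      'ego-group-member',
      RDF::URI("http://example.com/people/alice"),
      RDF::AllegroGraph::Query::PrologLiteral.new(2),
      :member)

    functor.variables.keys  #=> [:member]

    # `repo` is any AbstractRepository-derived object, as in the first sketch.
    functor.to_prolog(repo)
    #=> "(ego-group-member !<http://example.com/people/alice> 2 ?member)"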