talia_core 0.5.0 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/VERSION.yml CHANGED
@@ -1,5 +1,5 @@
  ---
  :major: 0
  :minor: 5
- :patch: 0
+ :patch: 1
  :build:
@@ -16,6 +16,8 @@ namespaces:
  talias: http://trac.talia.discovery-project.eu/wiki/StructuralOntology#
  taliadom: http://trac.talia.discovery-project.eu/wiki/SampleDomainOntology#
  sesame: http://www.openrdf.org/schema/sesame#
+ swicky: http://discovery-project.eu/ontologies/philoSpace/
+ discovery: http://discovery-project.eu/ontologies/scholar/0.1/
  site_name: Talia Test
  iip_server_uri: http://localhost:80/fcgi-bin/iipsrv.fcgi
  vips_command: /opt/local/vips
@@ -44,4 +44,6 @@ namespaces:
  luccadom: http://trac.talia.discovery-project.eu/wiki/LuccaOntology#
  talias: http://trac.talia.discovery-project.eu/wiki/StructuralOntology#
  taliadom: http://trac.talia.discovery-project.eu/wiki/SampleDomainOntology#
- sesame: http://www.openrdf.org/schema/sesame#
+ sesame: http://www.openrdf.org/schema/sesame#
+ swicky: http://discovery-project.eu/ontologies/philoSpace/
+ discovery: http://discovery-project.eu/ontologies/scholar/0.1/
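
The two new namespace entries register the SWicky and Discovery ontologies in the test configuration. Assuming registered namespaces become N:: shortcut constants, as the new Swicky::Notebook code later in this diff relies on, they would be referenced roughly like this (a sketch only):

    # Sketch: registered namespaces are assumed to become N:: shortcuts.
    N::SWICKY                   # => http://discovery-project.eu/ontologies/philoSpace/
    N::DISCOVERY                # => http://discovery-project.eu/ontologies/scholar/0.1/
    N::SWICKY.hasCoordinates    # predicate used by Swicky::Notebook.coordinates_for
    N::DISCOVERY.isPartOf       # predicate linking fragments to their page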
@@ -1,13 +1,13 @@
  class OntologiesController < ApplicationController

  def index
- onto_qry = Query.new(N::URI).select(:context).distinct.where(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, :context)
+ onto_qry = ActiveRDF::Query.new(N::URI).select(:context).distinct.where(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, :context)
  @ontologies = onto_qry.execute.collect { |context| context.local_name }
  end

  def show
  ontology_url = N::TALIA + URI.encode(params[:id])
- @triples = Query.new(N::URI).select(:s, :p, :o).where(:s, :p, :o, ontology_url).execute
+ @triples = ActiveRDF::Query.new(N::URI).select(:s, :p, :o).where(:s, :p, :o, ontology_url).execute
  end

  end
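
The controller change, repeated throughout the rest of this diff, is purely that ActiveRDF's query class is now addressed through its module namespace. A minimal sketch of the pattern (the condition used here is illustrative):

    # Before: Query was resolved from the top-level namespace.
    # After:  the class is addressed explicitly as ActiveRDF::Query.
    qry = ActiveRDF::Query.new(N::URI).select(:s).distinct
    qry.where(:s, N::RDF.type, N::TALIA.SwickyNotebook)
    qry.execute   # => array of N::URI results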
@@ -158,7 +158,7 @@ class SourcesController < ApplicationController

  def source_types
  return @source_types if(@source_types)
- @source_types = Query.new(N::URI).select(:type).distinct.where(:source, N::RDF.type, :type).execute
+ @source_types = ActiveRDF::Query.new(N::URI).select(:type).distinct.where(:source, N::RDF.type, :type).execute
  @source_types
  end

@@ -0,0 +1,57 @@
+ require 'builder'
+
+ module Swicky
+
+ # Helper class to encode API result codes for the controller
+ class ApiResult
+
+ attr_accessor :result, :message
+ RESULTS = {
+ :success => 200,
+ :illegal_parameter => 400,
+ :error => 500
+ }
+
+ def initialize(result, message)
+ raise(ArgumentError, "Illegal Result #{result}") unless(RESULTS.keys.include?(result.to_sym))
+ @result = result.to_sym
+ @message = message
+ end
+
+ def http_status
+ RESULTS[result]
+ end
+
+ def to_xml
+ xml = ''
+ builder = Builder::XmlMarkup.new(:target => xml, :indent => 2)
+ builder.instruct!
+ builder.swicky_api do
+ builder.result(result.to_s)
+ builder.message(message)
+ end
+ xml
+ end
+
+ def to_json
+ { :result => result, :message => message }.to_json
+ end
+
+ def to_html
+ html = ''
+ builder = Builder::XmlMarkup.new(:target => html, :indent => 2)
+ builder.declare! :DOCTYPE, :html, :PUBLIC, "-//W3C//DTD XHTML 1.0 Strict//EN", "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"
+ builder.html do
+ builder.head { builder.title("Swicky request result") }
+ builder.body do
+ builder.h1("Result Code")
+ builder.p(result.to_s)
+ builder.h1("Message")
+ builder.p(message)
+ end
+ end
+ html
+ end
+
+ end
+ end
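
A hedged sketch of how a controller might answer a SWicky API request with the new ApiResult class; only ApiResult's own methods come from the diff, the controller action and message are illustrative:

    # Hypothetical controller action using the ApiResult helper.
    def update
      result = Swicky::ApiResult.new(:success, 'Notebook updated')
      respond_to do |format|
        format.xml  { render :xml  => result.to_xml,  :status => result.http_status }
        format.json { render :json => result.to_json, :status => result.http_status }
        format.html { render :text => result.to_html, :status => result.http_status }
      end
    end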
@@ -0,0 +1,175 @@
+ module Swicky
+
+ # Takes a number of triples and encodes them in the SIMILE JSON format
+ class JsonEncoder
+
+ def initialize(triples)
+ @triples = triples
+ @types_hash = {}
+ @properties_hash = {}
+ @label_hash = {}
+ @label_inverse = {}
+ end
+
+ def to_json
+ @items ||= begin
+ items = []
+ # First a round to make sure that each item has a label
+ triple_hash.each { |object, values| values['label'] = make_label!(object, values) }
+ # Now build the items themselves
+ triple_hash.each { |object, values| items += build_item(object, values) }
+ items
+ end
+ hashy = { 'items' => @items, 'types' => @types_hash, 'properties' => @properties_hash }
+ { 'items' => @items, 'types' => @types_hash, 'properties' => @properties_hash }.to_json
+ end
+
+ private
+
+ def triple_hash
+ @triple_hash ||= begin
+ triple_hash = {}
+ # Sort all the triples into a hash, mapping all predicates
+ # to a single subject entry and all objects into a single
+ # predicate entry for each subject or predicate
+ @triples.each do |triple|
+ subject = triple.shift.to_uri
+ triple_hash[subject] ||= {}
+ predicate = triple.shift.to_uri
+ triple_hash[subject][predicate] ||= []
+ triple_hash[subject][predicate] << triple.first
+ end
+ triple_hash
+ end
+ end
+
+ # Builds the entry for one "item". This always returns an array, as there may
+ # be additional items created as placeholders for label references
+ def build_item(object, values)
+ items = []
+ item = {}
+ item['uri'] = object.to_s
+
+ item['type'] = make_types!(values)
+ item['label'] = values.delete('label')
+
+ # Add the normal predicates
+ values.each do |predicate, objects|
+ predicate_local = make_predicate_local(predicate)
+ resource_labels, additional_items = as_labels(objects)
+ item[predicate_local] = resource_labels
+ items += additional_items
+ end
+
+ items << item
+ items
+ end
+
+ # Turns the given resources into label references
+ def as_labels(resources)
+ additional_items = []
+ labels = resources.collect do |res|
+ label, additional = resource_label(res)
+ additional_items << additional if(additional)
+ label
+ end
+ [(labels.size == 1) ? labels.first : labels, additional_items]
+ end
+
+ def resource_label(resource)
+ return resource if(resource.is_a?(String))
+ return @label_inverse[resource.to_s] if(@label_inverse[resource.to_s])
+ label = make_label!(resource, {})
+ [label, {'uri' => resource.to_s, 'label' => label}]
+ end
+
+ # Check the type definitions from the given hash and transform them
+ # to "type" entries in the JSON structure
+ def make_types!(hash)
+ # First, check for the types
+ types = hash.delete(get_key_from(N::RDF.type, hash)) || []
+ # Add a default type if we don't have one
+ if(types.empty?)
+ types = [ N::RDF.Resource ]
+ end
+ # All types will be referred to by their local name
+ types.collect! { |type| make_type_local(type) }
+ types
+ end
+
+
+ # Create the label: Either use the RDFS label or the last part of the
+ # uri. This also inserts the label into an "inverse" hash so that
+ # labels can be looked up by the uri
+ def make_label!(uri, hash)
+ return @label_inverse[uri.to_s] if(@label_inverse[uri.to_s]) # label already exists
+ label = hash.delete(get_key_from(N::RDFS.label, hash)) || []
+ if(label.empty?)
+ label = check_label(label_for(uri), uri)
+ else
+ label = check_label(label.first, uri)
+ end
+
+ label.to_s
+ end
+
+ # Get the part of the uri that can be used as a label
+ def label_for(uri)
+ /[\/#]?([^\/#]+)\Z/.match(uri.to_s)[1]
+ end
+
+ # Check if the given label can be used, and adapt it if necessary
+ def check_label(label, uri)
+ label = first_free(label, uri.to_s, @label_hash)
+ @label_inverse[uri.to_s] ||= label
+ label
+ end
+
+ # Little kludge for getting badly-specified keys
+ def get_key_from(key, hash)
+ hash.keys.find { |k| k.to_s == key.to_s }
+ end
+
+ # Finds the first "free" element in the hash. This checks
+ # if hash[initial_value] is empty or equal to "value", if that is not the
+ # case it will try "initial_value2", "initial_value3", ... until the
+ # condition is fulfilled
+ def first_free(initial_key, value, hash)
+ candidate = initial_key
+ hash[candidate] ||= value
+ count = 1
+ while(hash[candidate] != value)
+ count += 1
+ candidate = "#{initial_key}#{count}"
+ hash[candidate] ||= value
+ end
+ candidate
+ end
+
+ # Create the local name for the predicate, and add the definition
+ # to the "properties" hash if necessary. This will also attempt to
+ # avoid collisions if some predicates map to the same local name
+ #
+ # TODO: Doesn't force RDF:label etc to map to the "correct" local
+ # name in case of collisions
+ def make_predicate_local(predicate)
+ first_free(predicate.to_uri.local_name, { 'uri' => predicate.to_s, 'valueType' => 'item' }, @properties_hash)
+ end
+
+ # Making local for types
+ def make_type_local(type)
+ first_free(type.to_uri.local_name, { 'uri' => type.to_s }, @types_hash)
+ end
+
+
+ # The entry for the "properties" hash for the given predicate
+ def predicate_property(predicate_uri)
+ {
+ "uri" => predicate_url,
+ "valueType" => "item"
+ }
+ end
+
+ end
+
+ end
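
The encoder takes an array of [subject, predicate, object] triples and produces the SIMILE/Exhibit JSON structure with its 'items', 'types' and 'properties' sections. A hedged usage sketch, obtaining the triples the same way the new Swicky::Notebook#data does (the context URL is illustrative):

    # Sketch: encode all triples of one named graph as SIMILE JSON.
    context_url = N::LOCAL + 'users/jdoe/swicky_notebooks/my_notes'   # illustrative
    triples = ActiveRDF::Query.new(N::URI).select(:s, :p, :o).distinct.where(:s, :p, :o, context_url).execute
    json = Swicky::JsonEncoder.new(triples).to_json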
@@ -0,0 +1,128 @@
+ require 'tempfile'
+
+ module Swicky
+
+ # Represents a SWicky Notebook in the RDF store. This wraps the queries to handle
+ # the SWicky annotations and user notebooks.
+ #
+ # A notebook is an RDF subgraph that is stored in its own context.
+ #
+ # All parameters for this class that end up in RDF queries will be sanitized
+ # automatically
+ class Notebook
+
+ include TaliaUtil::UriHelper
+ extend TaliaUtil::UriHelper
+
+ attr_reader :user_url, :url
+
+ def initialize(user_name, notebook_name)
+ @user_url = self.class.user_url(user_name)
+ @url = self.class.notebook_url(user_name, notebook_name)
+ end
+
+ def data
+ @data ||= ActiveRDF::Query.new(N::URI).select(:s, :p, :o).distinct.where(:s, :p, :o, url).execute
+ end
+
+ def xml_data
+ TaliaUtil::Xml::RdfBuilder.xml_string_for_triples(data)
+ end
+
+ def delete
+ ActiveRDF::FederationManager.delete(nil, nil, nil, url)
+ ActiveRDF::FederationManager.delete(user_url, N::TALIA.hasSwickyNotebook, url)
+ ActiveRDF::FederationManager.delete(url, N::RDF.type, N::TALIA.SwickyNotebook)
+ end
+
+ def load(xml_file)
+ @data = nil
+ begin
+ ActiveRDF::ConnectionPool.write_adapter.load(xml_file, 'rdfxml', url)
+ rescue Exception => e
+ puts "\tProblem loading #{xml_file.to_s}: (#{e.message}) File not loaded!"
+ puts e.backtrace
+ end
+ ActiveRDF::FederationManager.add(user_url, N::TALIA.hasSwickyNotebook, url)
+ ActiveRDF::FederationManager.add(url, N::RDF.type, N::TALIA.SwickyNotebook)
+ end
+
+ def create(xml_data)
+ # Make a temp file for the data
+ tmpfile = Tempfile.new('xml_notebook')
+ tmpfile << xml_data
+ tmpfile.close
+ # Load into store
+ load(tmpfile.path)
+ # remove the temp file
+ tmpfile.unlink
+ end
+
+ def exist?
+ ActiveRDF::Query.new(N::URI).select(:user).where(:user, N::TALIA.hasSwickyNotebook, url).execute.size > 0
+ end
+
+ class << self
+ def find_all(user_name = nil)
+ nb_query = ActiveRDF::Query.new(N::URI).select(:notebook).distinct
+ nb_query.where(:notebook, N::RDF.type, N::TALIA.SwickyNotebook)
+ nb_query.where(user_url(user_name), N::TALIA.hasSwickyNotebook, :notebook) if(user_name)
+ nb_query.execute
+ end
+
+ def user_url(user_name)
+ sanitize_sparql(N::LOCAL + "users/#{user_name}").to_uri
+ end
+
+ def notebook_url(user_name, notebook_name)
+ sanitize_sparql(user_url(user_name) + '/swicky_notebooks/' + notebook_name).to_uri
+ end
+
+ def coordinates_for(url)
+ url = sanitize_sparql(url).to_uri
+ frag_qry = ActiveRDF::Query.new(N::URI).select(:coordinates).distinct
+ frag_qry.where(:fragment, N::DISCOVERY.isPartOf, url)
+ frag_qry.where(:fragment, N::SWICKY.hasCoordinates, :coordinates)
+ frag_qry.where(:note, N::SWICKY.refersTo, :fragment)
+ frag_qry.execute.collect { |coord| coord.to_s }
+ end
+
+ def annotations_for_url(url)
+ url = sanitize_sparql(url).to_uri
+ select_annotations([:note, N::SWICKY.refersTo, url])
+ end
+
+ def annotations_for_xpointer(xpointer)
+ xpointer = sanitize_sparql(xpointer).to_uri
+ select_annotations([:note, N::SWICKY.refersTo, :fragment], [:fragment, N::SWICKY.hasCoordinates, xpointer])
+ end
+
+ private
+
+ def select_annotations(*note_matching)
+ # Select all triples on the notes
+ note_triples_qry = ActiveRDF::Query.new(N::URI).select(:note, :predicate, :object).distinct
+ note_matching.each { |conditions| note_triples_qry.where(*conditions) }
+ note_triples = note_triples_qry.where(:note, :predicate, :object).execute
+ # Select all statements on the triples
+ statement_triples_qry = ActiveRDF::Query.new(N::URI).select(:statement, :predicate, :object).distinct
+ note_matching.each { |conditions| statement_triples_qry.where(*conditions) }
+ statement_triples_qry.where(:note, N::SWICKY.hasStatement, :statement).where(:statement, :predicate, :object)
+ result_triples = note_triples + statement_triples_qry.execute
+ # TODO: Fix this to use a better query once available in ActiveRDF
+ additional_triples = []
+ result_triples.each do |trip|
+ additional_triples += ActiveRDF::Query.new(N::URI).select(:predicate, :object).where(trip[1].to_uri, :predicate, :object).execute.collect { |result| [trip[1].to_uri] + result }
+ if(trip.last.respond_to?(:uri))
+ additional_triples += ActiveRDF::Query.new(N::URI).select(:predicate, :object).where(trip.last, :predicate, :object).execute.collect { |result| [trip.last] + result }
+ end
+ end
+
+ # Return all results
+ result_triples + additional_triples
+ end
+
+ end
+
+ end
+ end
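
A short usage sketch for the new Notebook class, using only methods defined above; the user name, notebook name and payload variable are illustrative:

    # Create a notebook graph for a user from an RDF/XML payload, then read it back.
    notebook = Swicky::Notebook.new('jdoe', 'my_notes')
    notebook.create(rdf_xml_payload)     # loads the XML into the notebook's own context
    notebook.exist?                      # => true once the hasSwickyNotebook link is set
    notebook.data                        # => all triples in the notebook's context
    Swicky::Notebook.find_all('jdoe')    # notebooks registered for that user
    Swicky::Notebook.annotations_for_url('http://example.com/page')
    notebook.delete                      # removes the context and its registration triples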
@@ -21,7 +21,7 @@ module TaliaCore
  class ActiveSource < ActiveRecord::Base

  # Act like an ActiveRdfResource
- include RDFS::ResourceLike
+ include ActiveRDF::ResourceLike

  extend ActiveSourceParts::ClassMethods
  extend ActiveSourceParts::Finders
@@ -15,10 +15,12 @@ module TaliaCore
  # present. If attributes for one source are imported in more than one place, all
  # subsequent calls will merge the newly imported attributes with the existing ones.
  class GenericReader
+

  extend TaliaUtil::IoHelper
  include TaliaUtil::IoHelper
  include TaliaUtil::Progressable
+ include TaliaUtil::UriHelper

  # Helper class for state
  class State
@@ -163,13 +165,6 @@ module TaliaCore

  private

-
- # Removes all characters that are illegal in IRIs, so that the
- # URIs can be imported
- def irify(uri)
- N::URI.new(uri.to_s.gsub( /[{}|\\^`\s]/, '+')).to_s
- end
-
  # Call the handler method for the given element. If a block is given, that
  # will be called instead
  def call_handler(element)
@@ -3,18 +3,11 @@ module TaliaCore
  module Xml

  # Class for creating xml-rdf data
- class RdfBuilder < BaseBuilder
+ class RdfBuilder < TaliaUtil::Xml::RdfBuilder

- # Writes a simple "flat" triple. If the object is a string, it will be
- # treated as a "value" while an object (ActiveSource or N::URI) will be treated
- # as a "link"
- def write_triple(subject, predicate, object)
- subject = subject.respond_to?(:uri) ? subject.uri.to_s : subject
- predicate = predicate.respond_to?(:uri) ? predicate : N::URI.new(predicate)
- @builder.rdf :Description, "rdf:about" => subject do
- write_predicate(predicate, [ object ])
+ def self.build_source(source)
+ make_xml_string { |build| build.write_source(source) }
  end
- end

  # Writes a complete source to the rdf
  def write_source(source)
@@ -24,58 +17,18 @@ module TaliaCore
  write_predicate(predicate, source[predicate])
  end
  end
- end
-
- private

- # Build the structure for the XML file and pass on to
- # the given block
- def build_structure
- @builder.rdf :RDF, self.class.namespaces do
- yield
+ source.inverse_predicates.each do |predicate|
+ source.inverse[predicate].each do |inverse_subject|
+ @builder.rdf :Description, 'rdf:about' => inverse_subject do
+ write_predicate(predicate, [source])
+ end
+ end
  end
  end


- def self.namespaces
- @namespaces ||= begin
- namespaces = {}
- N::Namespace.shortcuts.each { |key, value| namespaces["xmlns:#{key.to_s}"] = value.to_s }
- namespaces
  end
  end
-
- # Build an rdf/xml string for one predicate, with the given values
- def write_predicate(predicate, values)
- values.each { |val| write_single_predicate(predicate, val) }
- end # end method
-
- def write_single_predicate(predicate, value)
- is_property = value.respond_to?(:uri)
- value_properties = is_property ? { 'value' => value } : extract_values(value.to_s)
- value = value_properties.delete('value')
- @builder.tag!(predicate.to_name_s, value_properties) do
- if(is_property)
- @builder.rdf :Description, 'rdf:about' => value.uri.to_s
- else
- @builder.text!(value)
- end
  end
  end
-
- # Splits up the value, extracting encoded language codes and RDF data types. The
- # result will be returned as a hash, with the "true" value being "value"
- def extract_values(value)
- prop_string = PropertyString.parse(value)
- result = {}
- result['value'] = prop_string
- result['rdf:datatype'] = prop_string.type if(prop_string.type)
- result['xml:lang'] = prop_string.lang if(prop_string.lang)
-
- result
- end
-
- end
- end
- end
- end
@@ -27,7 +27,11 @@ module TaliaCore
  # </source>
  # ...
  # </sources>
- class SourceBuilder < BaseBuilder
+ class SourceBuilder < TaliaUtil::Xml::BaseBuilder
+
+ def self.build_source(source)
+ make_xml_string { |build| build.write_source(source) }
+ end

  # Builds the RDF for a single source
  def write_source(source)
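
Both builders now inherit from their TaliaUtil::Xml counterparts and gain a build_source class method. A hedged sketch of the new entry point; make_xml_string is assumed to come from the TaliaUtil::Xml base classes, and the source lookup as well as the builders' full constant paths are illustrative, since the diff does not show the complete module nesting:

    # Sketch: build an XML string for one source via the new class method.
    source = TaliaCore::ActiveSource.find(:first)    # illustrative lookup
    rdf_xml = RdfBuilder.build_source(source)        # RDF/XML, now including inverse predicates
    plain_xml = SourceBuilder.build_source(source)   # the <sources>/<source> format shown above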
@@ -303,7 +303,7 @@ module TaliaCore
  # Logginging goes to standard talia logger
  ActiveRdfLogger.logger = talia_logger

- ConnectionPool.add_data_source(rdf_connection_opts)
+ ActiveRDF::ConnectionPool.add_data_source(rdf_connection_opts)
  end

  # Configure the namespaces
@@ -353,7 +353,7 @@ module TaliaCore
  return unless(@config['auto_ontologies'] && !['false', 'no'].include?(@config['auto_ontologies'].downcase))
  onto_dir = File.join(TALIA_ROOT, @config['auto_ontologies'])
  raise(SystemInitializationError, "Cannot find configured ontology dir #{onto_dir}") unless(File.directory?(onto_dir))
- adapter = ConnectionPool.write_adapter
+ adapter = ActiveRDF::ConnectionPool.write_adapter
  raise(SystemInitializationError, "Ontology autoloading without a context-aware adapter deletes all RDF data. This is only allowed in testing, please load the ontology manually.") unless(adapter.supports_context? || (@environment == 'testing'))
  raise(SystemInitializationError, "Ontology autoloading requires 'load' capability on the adapter.") unless(adapter.respond_to?(:load))

@@ -53,17 +53,17 @@
  # Clear the currently registered ontologies
  def clear_file_contexts
  # Remove all registered contexts
- to_clear = Query.new(N::URI).select(:context).distinct.where(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, :context).execute
+ to_clear = ActiveRDF::Query.new(N::URI).select(:context).distinct.where(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, :context).execute
  to_clear.each do |context|
  adapter.clear(context)
  end
- FederationManager.delete(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, nil)
+ ActiveRDF::FederationManager.delete(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, nil)
  end

  private

  def adapter
- @adapter ||= ConnectionPool.write_adapter
+ @adapter ||= ActiveRDF::ConnectionPool.write_adapter
  end

  # Prepare the context for the ontology import. All contexts will be registered
@@ -80,7 +80,7 @@ module TaliaCore
  N::URI.new(N::TALIA + context)
  end

- FederationManager.add(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, file_context)
+ ActiveRDF::FederationManager.add(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, file_context)

  file_context
  end
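
Taken together, the remaining hunks apply one mechanical change: ActiveRDF's Query, FederationManager and ConnectionPool are now always addressed through the ActiveRDF module namespace. A before/after sketch of the pattern, with argument values taken from the hunks above:

    # talia_core 0.5.0 and earlier
    ConnectionPool.add_data_source(rdf_connection_opts)
    FederationManager.add(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, file_context)

    # talia_core 0.5.1
    ActiveRDF::ConnectionPool.add_data_source(rdf_connection_opts)
    ActiveRDF::FederationManager.add(N::TALIA.rdf_context_space, N::TALIA.rdf_file_context, file_context)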