pho 0.0.1
- data/README +56 -0
- data/Rakefile +63 -0
- data/lib/pho.rb +21 -0
- data/lib/pho/etags.rb +54 -0
- data/lib/pho/rdf_collection.rb +107 -0
- data/lib/pho/store.rb +389 -0
- data/lib/test.rb +29 -0
- data/tests/tc_contentbox.rb +96 -0
- data/tests/tc_etags.rb +38 -0
- data/tests/tc_jobcontrol.rb +59 -0
- data/tests/tc_metabox.rb +124 -0
- data/tests/tc_rdf_collection.rb +127 -0
- data/tests/tc_search.rb +54 -0
- data/tests/tc_sparql.rb +76 -0
- data/tests/tc_store_util.rb +14 -0
- data/tests/ts_pho.rb +10 -0
- metadata +102 -0
data/README
ADDED
@@ -0,0 +1,56 @@
The Pho ruby module provides a lightweight Ruby client library for interacting with the Talis
Platform API (http://n2.talis.com/wiki/Platform_API).

== Author

Leigh Dodds (leigh.dodds@talis.com)

== Download

The latest version of this library can be downloaded from:

http://pho.rubyforge.net

== Usage

To use Pho you can:

  require 'pho'

And then create an instance of the appropriate class, e.g. Store:

  store = Pho::Store.new("http://api.talis.com/testing", "user", "pass")
  response = store.describe("http://www.example.org")

For more detailed examples consult the documentation for the Store class.

== Control over HTTP interactions

Pho depends on the HTTPClient module, and all HTTP interactions are delegated to
an instance of the HTTPClient class. In circumstances where greater control over the
HTTP interaction is required, e.g. to configure proxy servers, an existing instance of
this class can be provided:

  client = HTTPClient.new
  # configure client as required
  store = Pho::Store.new("http://api.talis.com/testing", "user", "pass", client)
  # pass the client instance as a parameter
  response = store.describe("http://www.example.org")

== License

Copyright 2009 Leigh Dodds

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

See the License for the specific language governing permissions and limitations
under the License.
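A minimal sketch of the proxy case mentioned above, assuming a hypothetical proxy at proxy.example.com and the same placeholder store and credentials as the README examples:

  require 'rubygems'
  require 'pho'

  # HTTPClient accepts a proxy URL as its first argument
  client = HTTPClient.new("http://proxy.example.com:8080")

  store = Pho::Store.new("http://api.talis.com/testing", "user", "pass", client)
  response = store.describe("http://www.example.org")
  puts response.status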
data/Rakefile
ADDED
@@ -0,0 +1,63 @@
require 'rake'
require 'rake/gempackagetask'
require 'rake/rdoctask'
require 'rake/testtask'
require 'rake/clean'

NAME = "pho"
VER = "0.0.1"

RDOC_OPTS = ['--quiet', '--title', 'Pho (Talis Platform Client) Reference', '--main', 'README']

PKG_FILES = %w( README Rakefile ) +
  Dir.glob("{bin,doc,tests,lib}/**/*")

CLEAN.include ['*.gem', 'pkg']

SPEC =
  Gem::Specification.new do |s|
    s.name = NAME
    s.version = VER
    s.platform = Gem::Platform::RUBY
    s.has_rdoc = true
    s.extra_rdoc_files = ["README"]
    s.rdoc_options = RDOC_OPTS
    s.summary = "Ruby client for the Talis Platform"
    s.description = s.summary
    s.author = "Leigh Dodds"
    s.email = 'leigh.dodds@talis.com'
    s.homepage = 'http://pho.rubyforge.net'
    s.rubyforge_project = 'pho'
    s.files = PKG_FILES
    s.require_path = "lib"
    s.bindir = "bin"
    s.test_file = "tests/ts_pho.rb"
    s.add_dependency("httpclient", ">= 2.1.3.1")
    s.add_dependency("json", ">= 1.1.3")
    s.add_dependency("mocha", ">= 0.9.5")
  end

Rake::GemPackageTask.new(SPEC) do |pkg|
  pkg.need_tar = true
end

Rake::RDocTask.new do |rdoc|
  rdoc.rdoc_dir = 'doc/rdoc'
  rdoc.options += RDOC_OPTS
  rdoc.main = "README"
  rdoc.rdoc_files.add ["README"]
end

Rake::TestTask.new do |test|
  test.test_files = FileList['tests/tc_*.rb']
end

task :install do
  sh %{rake package}
  sh %{sudo gem install pkg/#{NAME}-#{VER}}
end

task :uninstall => [:clean] do
  sh %{sudo gem uninstall #{NAME}}
end
data/lib/pho.rb
ADDED
@@ -0,0 +1,21 @@
require 'rubygems'
require 'httpclient'
require 'json'

require 'pho/etags'
require 'pho/store'
require 'pho/rdf_collection'

module Pho

  ACCEPT_RDF = {"Accept" => "application/rdf+xml"}.freeze
  ACCEPT_JSON = {"Accept" => "application/json"}.freeze

  RDF_XML = {"Content-Type" => "application/rdf+xml"}.freeze

  JOB_RESET = "http://schemas.talis.com/2006/bigfoot/configuration#ResetDataJob".freeze
  JOB_SNAPSHOT = "http://schemas.talis.com/2006/bigfoot/configuration#SnapshotJob".freeze
  JOB_REINDEX = "http://schemas.talis.com/2006/bigfoot/configuration#ReindexJob".freeze

end
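An illustrative sketch (the store URI, credentials and resource URI are placeholders) showing one of the frozen header constants above being reused to ask for a JSON description:

  require 'rubygems'
  require 'pho'

  store = Pho::Store.new("http://api.talis.com/stores/testing", "user", "pass")
  # ACCEPT_JSON is {"Accept" => "application/json"}, so this requests the JSON serialisation
  response = store.describe("http://www.example.org/thing", Pho::ACCEPT_JSON["Accept"])
  puts response.content if response.status == 200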
data/lib/pho/etags.rb
ADDED
@@ -0,0 +1,54 @@
require 'yaml'

module Pho

  # Simple mechanism for managing etags
  class Etags

    attr_reader :file, :saved

    def initialize(file = nil)
      @file = file
      @saved = true
      @tags = Hash.new
      if @file != nil
        @tags = YAML::load(@file)[0]
      end
    end

    def save(other = nil)
      if (other != nil)
        other.write( @tags.to_yaml() )
        return
      else
        if (!saved && @file != nil)
          @file.write( @tags.to_yaml() )
          @file.close
        end
      end
    end

    def add(uri, tag)
      if (uri != nil && tag != nil)
        @tags[uri] = tag
        @saved = false
      end
    end

    def add_from_response(uri, response)
      add(uri, response.header["ETag"][0])
    end

    def get(uri)
      return @tags[uri]
    end

    def has_tag?(uri)
      return @tags.has_key?(uri)
    end

  end

end
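A minimal sketch (store URI, credentials and resource URI are placeholders) of combining Etags with Store#describe to make conditional GET requests:

  require 'rubygems'
  require 'pho'

  store = Pho::Store.new("http://api.talis.com/stores/testing", "user", "pass")
  etags = Pho::Etags.new

  # The first request caches the ETag returned by the Platform
  first = store.describe("http://www.example.org/thing", "application/rdf+xml", etags)

  # The second request sends If-None-Match, so an unchanged resource returns 304 Not Modified
  second = store.describe("http://www.example.org/thing", "application/rdf+xml", etags)
  puts second.status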
data/lib/pho/rdf_collection.rb
ADDED
@@ -0,0 +1,107 @@
module Pho

  attr_reader :dir
  attr_reader :store

  # Provides a simple mechanism for managing a directory of RDF/XML documents
  # and uploading them to a platform store.
  #
  # Allows a collection to be mirrored into the platform
  class RDFCollection

    RDF = "rdf".freeze
    OK = "ok".freeze
    FAIL = "fail".freeze

    def initialize(store, dir, rdf_suffix=RDF, ok_suffix=OK, fail_suffix=FAIL, sleep=1)
      @store = store
      @dir = dir
      @sleep = sleep
      @rdf_suffix = rdf_suffix
      @ok_suffix = ok_suffix
      @fail_suffix = fail_suffix
    end

    # Store all files in the directory that match the rdf suffix
    def store()
      files_to_store = new_files()
      files_to_store.each do |filename|
        file = File.new(filename)
        response = @store.store_file(file)
      end
    end

    # Retry anything known to have failed
    def retry_failures()
      #TODO
    end

    # Reset the directory to clear out any previous statuses.
    # The store can also be reset at the same time: use with care!
    def reset(reset_store=false)
      Dir.glob( File.join(@dir, "*.#{@fail_suffix}") ).each do |file|
        File.delete(file)
      end
      Dir.glob( File.join(@dir, "*.#{@ok_suffix}") ).each do |file|
        File.delete(file)
      end
    end

    # List files being managed
    def list()
      return Dir.glob( File.join(@dir, "*.#{@rdf_suffix}") )
    end

    # List failures
    def failures()
      fails = Array.new
      Dir.glob( File.join(@dir, "*.#{@fail_suffix}") ).each do |file|
        fails << file.gsub(/\.#{@fail_suffix}/, ".#{@rdf_suffix}")
      end
      return fails
    end

    # List successes
    def successes()
      successes = Array.new
      Dir.glob( File.join(@dir, "*.#{@ok_suffix}") ).each do |file|
        successes << file.gsub(/\.#{@ok_suffix}/, ".#{@rdf_suffix}")
      end
      return successes
    end

    # List any new files in the directory
    def new_files()
      newfiles = Array.new
      Dir.glob( File.join(@dir, "*.#{@rdf_suffix}") ) do |file|
        ok_file = get_ok_file_for(file)
        fail_file = get_fail_file_for(file)
        if !( File.exists?(ok_file) or File.exists?(fail_file) )
          newfiles << file
        end
      end
      return newfiles
    end

    # Summarize the state of the collection as a simple report string
    def summary()
      failures = failures()
      successes = successes()
      newfiles = new_files()
      total = failures.size + successes.size + newfiles.size
      summary = "#{@dir} contains #{total} files: #{successes.size} stored, #{failures.size} failed, #{newfiles.size} new"
      return summary
    end

    def get_fail_file_for(filename)
      return filename.gsub(/\.#{@rdf_suffix}/, ".#{@fail_suffix}")
    end

    def get_ok_file_for(filename)
      return filename.gsub(/\.#{@rdf_suffix}/, ".#{@ok_suffix}")
    end

  end

end
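A minimal usage sketch (the store details and the local directory are placeholders), mirroring a directory of *.rdf files into a store:

  require 'rubygems'
  require 'pho'

  store = Pho::Store.new("http://api.talis.com/stores/testing", "user", "pass")
  collection = Pho::RDFCollection.new(store, "/tmp/rdf")

  puts collection.summary   # e.g. "/tmp/rdf contains 3 files: 0 stored, 0 failed, 3 new"
  collection.store          # POST any new *.rdf files to the store's Metabox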
data/lib/pho/store.rb
ADDED
@@ -0,0 +1,389 @@
module Pho

  #TODO:
  #
  # Changesets
  # Multisparql
  #
  # Conditional deletions
  # If-Modified-Since support
  # Robustness in uri fetching
  #
  # RDOC

  # The Store class acts as a lightweight client interface to the Talis Platform API
  # (http://n2.talis.com/wiki/Platform_API). The class provides methods for interacting
  # with each of the core platform services, e.g. retrieving and storing RDF, performing
  # searches, SPARQL queries, etc.
  #
  # == Usage
  #
  #   store = Pho::Store.new("http://api.talis.com/stores/testing", "user", "pass")
  #   store.store_file( File.new("/tmp/example.rdf") )
  #   store.store_url( "http://www.example.org/example.rdf" )
  #   store.describe( "http://www.example.org/thing" )
  #   store.reset
  #
  # == Examples
  #
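  # The following are illustrative sketches only; the store URI, credentials and
  # query terms are placeholders:
  #
  #   store = Pho::Store.new("http://api.talis.com/stores/testing", "user", "pass")
  #
  #   # SPARQL SELECT against the store's /services/sparql endpoint
  #   results = store.sparql_select("SELECT ?s WHERE { ?s ?p ?o } LIMIT 10")
  #   puts results.content if results.status == 200
  #
  #   # Free-text search of the Metabox indexes
  #   hits = store.search("example", {"max" => "5"})
  #   puts hits.content
  #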
  class Store

    # Retrieve the HTTPClient instance being used by this object
    attr_reader :client
    # Retrieve the admin username configured in this instance
    attr_reader :username
    # Retrieve the base uri of this store
    attr_reader :storeuri

    # Create an instance of the store class
    #
    # storeuri:: base uri for the Platform store to be accessed
    # username:: admin username, may be nil
    # password:: admin password, may be nil
    # client:: an instance of HTTPClient
    def initialize(storeuri, username=nil, password=nil, client=HTTPClient.new())
      @storeuri = storeuri.chomp("/")
      @username = username
      @password = password
      @client = client
      set_credentials(username, password) if username or password
    end

    # Set the credentials that this store will use when carrying out authorization
    #
    # username:: admin username
    # password:: admin password
    def set_credentials(username, password)
      @client.set_auth(@storeuri, username, password)
    end

    # Build a request uri by concatenating it with the base uri of the store
    #
    # uri:: relative URI to a store service, e.g. "/services/sparql"
    def build_uri(uri)
      if (uri.start_with?(@storeuri))
        return uri
      end
      if uri.start_with?("/")
        return @storeuri + uri
      else
        return @storeuri + "/" + uri
      end
    end

    #############
    # METABOX
    #############

    # Store some RDF in the Metabox associated with this store
    #
    # data:: a String containing the data to store
    def store_data(data)
      u = build_uri("/meta")
      response = @client.post(u, data, RDF_XML)
      return response
    end

    # Store the contents of a File (or any IO stream) in the Metabox associated with this store.
    # The client does not support streaming submissions of data, so the stream will be fully read
    # before the data is submitted to the platform.
    #
    # file:: an IO object
    def store_file(file)
      data = file.read()
      file.close()
      return store_data(data)
    end

    # Retrieve RDF data from the specified URL and store it in the Store Metabox
    #
    # An Accept header of "application/rdf+xml" will be sent in the request to support retrieval
    # of RDF from URLs that support Content Negotiation.
    #
    # u:: the url of the data
    # parameters:: a Hash of url parameters to pass when requesting data from the specified URL
    def store_url(u, parameters=nil)
      headers = ACCEPT_RDF.clone()
      dataresp = @client.get(u, parameters, headers)

      #TODO make this more robust
      if dataresp.status != 200
        throw
      end

      return store_data(dataresp.content)
    end

    # Retrieve an RDF description of a specific URI. The default behaviour is to retrieve an
    # RDF/XML document, but other formats supported by the Talis Platform can be requested,
    # e.g. application/json.
    #
    # uri:: the URI of the resource to describe
    # format:: the preferred response format
    # etags:: an instance of the Pho::Etags class to support conditional GETs
    # if_match:: specify true to retrieve data only if the version matches a known ETag, false to perform a Conditional GET
    #
    # Note that this method is different from sparql_describe in that it is intended to generate a
    # description of a single URI, using a separate service exposed by the Platform. This service is
    # optimised for retrieval of descriptions of single resources and supports HTTP caching and
    # conditional retrieval. The sparql_describe method should be used to submit more complex
    # DESCRIBE queries to the Platform, e.g. to generate descriptions of resources matching a
    # particular graph pattern.
    def describe(uri, format="application/rdf+xml", etags=nil, if_match=false)
      u = self.build_uri("/meta")
      headers = {"Accept" => format}
      headers = configure_headers_for_conditional_get("#{u}?about=#{uri}", headers, etags, if_match)
      response = @client.get(u, {"about" => uri}, headers)
      record_etags("#{u}?about=#{uri}", etags, response)
      return response
    end

    #############
    # SERVICES
    #############

    # Perform a SPARQL DESCRIBE query.
    #
    # query:: the SPARQL query
    # format:: the preferred response format
    def sparql_describe(query, format="application/rdf+xml")
      return sparql(query, format)
    end

    # Perform a SPARQL CONSTRUCT query.
    #
    # query:: the SPARQL query
    # format:: the preferred response format
    def sparql_construct(query, format="application/rdf+xml")
      return sparql(query, format)
    end

    # Perform a SPARQL ASK query.
    #
    # query:: the SPARQL query
    # format:: the preferred response format
    def sparql_ask(query, format="application/sparql-results+xml")
      return sparql(query, format)
    end

    # Perform a SPARQL SELECT query.
    #
    # query:: the SPARQL query
    # format:: the preferred response format
    def sparql_select(query, format="application/sparql-results+xml")
      return sparql(query, format)
    end

    # Perform a SPARQL query
    #
    # query:: the SPARQL query
    # format:: the preferred response format
    def sparql(query, format)
      u = self.build_uri("/services/sparql")
      params = {}
      params["query"] = query
      headers = {}
      headers["Accept"] = format
      return @client.get(u, params, headers)
    end

    # Search the Metabox indexes.
    #
    # query:: the query to perform. See XXXX for query syntax
    # params:: additional query parameters (see below)
    #
    # The _params_ hash can contain the following values:
    # * *max*: the maximum number of results to return (default is 10)
    # * *offset*: offset into the query results (for paging; default is 0)
    # * *sort*: ordered list of fields to be used when applying sorting
    # * *xsl-uri*: URL of an XSLT transform to be applied to the results, transforming the default RSS 1.0 results format into an alternative representation
    # * *content-type*: when applying an XSLT transform, the content type to use when returning the results
    #
    # Any additional entries in the _params_ hash will be passed through to the Platform.
    # These parameters will only be used when an XSLT transformation is being applied, in which
    # case they will be provided as parameters to the stylesheet.
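    # An illustrative call (the query text and parameter values are placeholders):
    #
    #   response = store.search("example", {"max" => "20", "offset" => "0", "sort" => "title"})
    #   puts response.content if response.status == 200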
    def search(query, params=nil)
      u = self.build_uri("/items")
      search_params = get_search_params(u, query, params)
      response = @client.get(u, search_params, nil)
      return response
    end

    # Perform a facetted search against the Metabox indexes.
    #
    # query:: the query to perform. See XXXX for query syntax
    # facets:: an ordered list of facets to be used
    # params:: additional query parameters (see below)
    #
    # The _params_ hash can contain the following values:
    # * *top*: the maximum number of results to return for each facet
    # * *output*: the preferred response format, can be html or xml (the default)
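    # An illustrative call (the query text and facet field names are placeholders):
    #
    #   response = store.facet("example", ["creator", "publisher"], {"top" => "5"})
    #   puts response.content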
    def facet(query, facets, params=nil)
      if facets == nil or facets.empty?
        #todo
        throw
      end
      u = self.build_uri("/services/facet")
      search_params = get_search_params(u, query, params)
      search_params["fields"] = facets.join(",")
      response = @client.get(u, search_params, nil)
      return response
    end

    def get_search_params(u, query, params)
      if params != nil
        search_params = params.clone()
      else
        search_params = Hash.new
      end
      search_params["query"] = query
      return search_params
    end

    # Augment an RSS feed that can be retrieved from the specified URL against data in this store
    #
    # uri:: the URL for the RSS 1.0 feed
    def augment_uri(uri)
      u = self.build_uri("/services/augment")
      response = @client.get(u, {"data-uri" => uri}, nil)
      return response
    end

    # Augment an RSS feed against data in this store by POSTing it to the Platform
    #
    # data:: a String containing the RSS feed
    def augment(data)
      u = self.build_uri("/services/augment")
      response = @client.post(u, data, nil)
      return response
    end

    # Add the appropriate HTTP headers for conditional GET requests
    def configure_headers_for_conditional_get(u, headers, etags, if_match)
      if etags != nil && etags.has_tag?(u)
        if if_match
          headers["If-Match"] = etags.get(u)
        else
          headers["If-None-Match"] = etags.get(u)
        end
      end
      return headers
    end

    def record_etags(u, etags, response)
      if (etags != nil && response.status == 200)
        etags.add_from_response(u, response)
      end
    end

    #############
    # CONTENTBOX
    #############

    # Store an item in the Contentbox for this store
    #
    # f:: a File or other IO object from which data will be read
    # mimetype:: the mimetype of the object to record in the Platform
    # uri:: the URI at which to store the item (relative to the base uri for the store). If nil, then a URI will be assigned by the Platform
    #
    # When a _uri_ is not specified, the Platform will return a 201 Created response with a Location header containing the URI of the
    # newly stored item. If a URI is specified then a successful request will result in a 200 OK response.
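    # Illustrative calls (the file path, mimetype and item URI are placeholders):
    #
    #   # Let the Platform assign a URI; the Location header of the response identifies the new item
    #   response = store.upload_item(File.new("/tmp/picture.jpg"), "image/jpeg")
    #
    #   # Store the item at a specific URI, relative to the store
    #   response = store.upload_item(File.new("/tmp/picture.jpg"), "image/jpeg", "/items/picture.jpg")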
    def upload_item(f, mimetype, uri=nil)
      data = f.read()
      f.close()
      headers = {"Content-Type" => mimetype}
      if uri == nil
        u = self.build_uri("/items")
        response = @client.post(u, data, headers)
      else
        if !uri.start_with?(@storeuri)
          uri = build_uri(uri)
        end
        response = @client.put(uri, data, headers)
      end
      return response
    end

    # Delete an item from the Contentbox in this Store
    #
    # uri:: the URL of the item, can be relative
    #
    # TODO: conditional deletes
    def delete_item(uri)
      if !uri.start_with?(@storeuri)
        uri = build_uri(uri)
      end
      return @client.delete(uri)
    end

    # Get an item from the Contentbox.
    #
    # uri:: the URL of the item, can be relative.
    #
    # If the provided URL of the item is not in the Contentbox, then the response will be a redirect to the
    # RDF description of this item, as available from the Metabox.
    #
    # TODO: document etags, redirects
    def get_item(uri, etags=nil, if_match=false)
      u = self.build_uri(uri)
      headers = Hash.new
      headers = configure_headers_for_conditional_get("#{u}", headers, etags, if_match)
      response = @client.get(u, nil, headers)
      record_etags("#{u}", etags, response)
      return response
    end

    #############
    # JOBS
    #############

    # Construct an RDF/XML document containing a job request for submitting to the Platform.
    #
    # t:: a Time object, specifying the time at which the request should be carried out
    # joburi:: the URI for the JobType that should be created
    # label:: a label for this job.
    def build_job_request(t, joburi, label)
      time = t.strftime("%Y-%m-%dT%H:%M:%SZ")
      data = "<rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" "
      data << " xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\" "
      data << " xmlns:bf=\"http://schemas.talis.com/2006/bigfoot/configuration#\"> "
      data << " <bf:JobRequest>"
      data << " <rdfs:label>#{label}</rdfs:label>"
      data << " <bf:jobType rdf:resource=\"#{joburi}\"/>"
      data << " <bf:startTime>#{time}</bf:startTime>"
      data << " </bf:JobRequest>"
      data << "</rdf:RDF>"
      return data
    end

    def reset(t=Time.now)
      return submit_job(JOB_RESET, "Reset my store", t)
    end

    def reindex(t=Time.now)
      return submit_job(JOB_REINDEX, "Reindex my store", t)
    end

    def snapshot(t=Time.now)
      return submit_job(JOB_SNAPSHOT, "Snapshot my store", t)
    end

    def submit_job(joburi, label, t=Time.now)
      u = build_uri("/jobs")
      data = build_job_request(t, joburi, label)
      response = @client.post(u, data, RDF_XML)
      return response
    end

    #############
    # ADMIN
    #############

    def status()
      u = build_uri("/config/access-status")
      response = @client.get_content(u, nil, ACCEPT_JSON)
      json = JSON.parse(response)
      state = Hash.new
      state["retryInterval"] = json[u.to_s]["http:\/\/schemas.talis.com\/2006\/bigfoot\/configuration#retryInterval"][0]["value"]
      state["statusMessage"] = json[u.to_s]["http:\/\/schemas.talis.com\/2006\/bigfoot\/configuration#statusMessage"][0]["value"]
      state["accessMode"] = json[u.to_s]["http:\/\/schemas.talis.com\/2006\/bigfoot\/configuration#accessMode"][0]["value"]
      return state
    end

  end

end
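A short closing sketch, assuming the same placeholder store and credentials used above, showing the job scheduling and admin methods defined in the Store class:

  require 'rubygems'
  require 'pho'

  store = Pho::Store.new("http://api.talis.com/stores/testing", "user", "pass")

  # Schedule a snapshot job to run five minutes from now
  response = store.snapshot(Time.now + 300)
  puts response.status

  # Inspect the store's current access status
  state = store.status
  puts state["accessMode"]
  puts state["statusMessage"]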