talia_core 0.5.4 → 0.7.0
- data/VERSION.yml +2 -2
- data/config/talia_core.yml.example +37 -35
- data/generators/talia_admin/templates/app/models/fake_source.rb +93 -0
- data/generators/talia_admin/templates/app/models/talia_collection.rb +13 -37
- data/generators/talia_base/talia_base_generator.rb +0 -1
- data/generators/talia_base/templates/app/controllers/custom_templates_controller.rb +2 -1
- data/generators/talia_base/templates/app/controllers/sources_controller.rb +1 -1
- data/generators/talia_base/templates/script/configure_talia +56 -73
- data/generators/talia_swicky/talia_swicky_generator.rb +18 -0
- data/generators/talia_swicky/templates/app/controllers/swicky_notebooks_controller.rb +111 -0
- data/generators/talia_swicky/templates/app/helpers/swicky_notebooks_helper.rb +29 -0
- data/generators/talia_swicky/templates/app/views/swicky_notebooks/index.builder +6 -0
- data/generators/talia_swicky/templates/app/views/swicky_notebooks/index.html.erb +10 -0
- data/generators/talia_swicky/templates/app/views/swicky_notebooks/show.html.erb +11 -0
- data/generators/talia_swicky/templates/test/fixtures/notebook.rdf +862 -0
- data/generators/talia_swicky/templates/test/functional/swicky_notebooks_controller_test.rb +44 -0
- data/lib/core_ext/boolean.rb +23 -0
- data/lib/core_ext/jdbc_rake_monkeypatch.rb +22 -0
- data/lib/core_ext/nil_class.rb +11 -0
- data/lib/core_ext/object.rb +34 -0
- data/lib/core_ext/string.rb +15 -0
- data/lib/custom_template.rb +3 -1
- data/lib/loader_helper.rb +16 -3
- data/lib/mysql.rb +7 -7
- data/lib/progressbar.rb +2 -2
- data/lib/swicky/exhibit_json/item.rb +129 -0
- data/lib/swicky/exhibit_json/item_collection.rb +129 -0
- data/lib/swicky/fragment.rb +0 -0
- data/lib/swicky/note.rb +7 -0
- data/lib/swicky/notebook.rb +78 -12
- data/lib/talia_core/active_source.rb +45 -13
- data/lib/talia_core/active_source_parts/class_methods.rb +154 -26
- data/lib/talia_core/active_source_parts/finders.rb +49 -26
- data/lib/talia_core/active_source_parts/predicate_handler.rb +71 -23
- data/lib/talia_core/active_source_parts/rdf/ntriples_reader.rb +13 -0
- data/lib/talia_core/active_source_parts/rdf/rdf_reader.rb +99 -0
- data/lib/talia_core/active_source_parts/rdf/rdfxml_reader.rb +12 -0
- data/lib/talia_core/active_source_parts/{rdf.rb → rdf_handler.rb} +52 -19
- data/lib/talia_core/active_source_parts/xml/generic_reader.rb +151 -260
- data/lib/talia_core/active_source_parts/xml/generic_reader_add_statements.rb +97 -0
- data/lib/talia_core/active_source_parts/xml/generic_reader_helpers.rb +88 -0
- data/lib/talia_core/active_source_parts/xml/generic_reader_import_statements.rb +239 -0
- data/lib/talia_core/active_source_parts/xml/rdf_builder.rb +14 -7
- data/lib/talia_core/active_source_parts/xml/source_builder.rb +7 -3
- data/lib/talia_core/active_source_parts/xml/source_reader.rb +17 -2
- data/lib/talia_core/collection.rb +192 -1
- data/lib/talia_core/data_types/data_loader.rb +88 -18
- data/lib/talia_core/data_types/data_record.rb +24 -2
- data/lib/talia_core/data_types/delayed_copier.rb +13 -3
- data/lib/talia_core/data_types/file_record.rb +24 -13
- data/lib/talia_core/data_types/file_store.rb +111 -94
- data/lib/talia_core/data_types/iip_data.rb +104 -23
- data/lib/talia_core/data_types/iip_loader.rb +102 -56
- data/lib/talia_core/data_types/image_data.rb +3 -1
- data/lib/talia_core/data_types/media_link.rb +4 -1
- data/lib/talia_core/data_types/mime_mapping.rb +65 -38
- data/lib/talia_core/data_types/path_helpers.rb +23 -17
- data/lib/talia_core/data_types/pdf_data.rb +9 -6
- data/lib/talia_core/data_types/simple_text.rb +5 -4
- data/lib/talia_core/data_types/xml_data.rb +53 -25
- data/lib/talia_core/dummy_handler.rb +3 -2
- data/lib/talia_core/errors.rb +13 -27
- data/lib/talia_core/initializer.rb +44 -4
- data/lib/talia_core/oai/active_source_model.rb +13 -6
- data/lib/talia_core/oai/active_source_oai_adapter.rb +13 -12
- data/lib/talia_core/rdf_import.rb +1 -1
- data/lib/talia_core/rdf_resource.rb +2 -1
- data/lib/talia_core/semantic_collection_wrapper.rb +143 -151
- data/lib/talia_core/semantic_property.rb +4 -0
- data/lib/talia_core/semantic_relation.rb +84 -33
- data/lib/talia_core/source.rb +45 -25
- data/lib/talia_core/source_fragment.rb +7 -0
- data/lib/talia_core/source_transfer_object.rb +3 -1
- data/lib/talia_core/source_types/agent.rb +16 -0
- data/lib/talia_core/source_types/dc_resource.rb +3 -3
- data/lib/talia_core/source_types/marcont_resource.rb +15 -0
- data/lib/talia_core/source_types/skos_concept.rb +17 -0
- data/lib/talia_dependencies.rb +1 -1
- data/lib/talia_util.rb +1 -1
- data/lib/talia_util/bar_progressor.rb +1 -1
- data/lib/talia_util/image_conversions.rb +8 -2
- data/lib/talia_util/import_job_helper.rb +40 -3
- data/lib/talia_util/io_helper.rb +15 -4
- data/lib/talia_util/progressable.rb +50 -1
- data/lib/talia_util/rake_tasks.rb +3 -21
- data/lib/talia_util/test_helpers.rb +6 -1
- data/lib/talia_util/util.rb +108 -27
- data/lib/talia_util/xml/base_builder.rb +28 -1
- data/lib/talia_util/xml/rdf_builder.rb +81 -5
- data/lib/tasks/talia_core_tasks.rake +2 -0
- data/test/core_ext/boolean_test.rb +26 -0
- data/test/core_ext/nil_class_test.rb +14 -0
- data/test/core_ext/object_test.rb +26 -0
- data/test/core_ext/string_test.rb +11 -0
- data/test/swicky/json_encoder_test.rb +51 -42
- data/test/swicky/notebook_test.rb +13 -6
- data/test/talia_core/active_source_finder_interface_test.rb +30 -0
- data/test/talia_core/active_source_test.rb +445 -34
- data/test/talia_core/collection_test.rb +332 -0
- data/test/talia_core/data_types/file_record_test.rb +2 -23
- data/test/talia_core/ntriples_reader_test.rb +49 -0
- data/test/talia_core/rdfxml_reader_test.rb +51 -0
- data/test/talia_core/source_test.rb +12 -0
- data/test/talia_util/import_job_helper_test.rb +19 -12
- metadata +190 -90
- data/config/database.yml +0 -19
- data/config/rdfstore.yml +0 -13
- data/config/talia_core.yml +0 -24
- data/generators/talia_base/templates/migrations/bj_migration.rb +0 -10
- data/lib/JXslt/jxslt.rb +0 -60
- data/lib/swicky/json_encoder.rb +0 -179
- data/lib/talia_core/agent.rb +0 -14
- data/lib/talia_core/background_jobs/job.rb +0 -82
- data/lib/talia_core/background_jobs/progress_job.rb +0 -68
- data/lib/talia_core/data_types/temp_file_handling.rb +0 -85
- data/lib/talia_core/ordered_source.rb +0 -228
- data/lib/talia_core/semantic_collection_item.rb +0 -94
- data/lib/talia_core/source_types/collection.rb +0 -15
- data/lib/talia_util/progressbar.rb +0 -236
- data/tasks/talia_core_tasks.rake +0 -2
- data/test/talia_core/ordered_source_test.rb +0 -394
- data/test/talia_core/semantic_collection_item_test.rb +0 -125
data/config/database.yml
DELETED
@@ -1,19 +0,0 @@
----
-development:
-  adapter: jdbcmysql
-  database: testx_talia_development
-  username: talia
-  password: talia
-  host: localhost
-test:
-  adapter: jdbcmysql
-  database: testx_talia_test
-  username: talia
-  password: talia
-  host: localhost
-production:
-  adapter: jdbcmysql
-  database: testx_talia_production
-  username: talia
-  password: talia
-  host: localhost
data/config/rdfstore.yml
DELETED
data/config/talia_core.yml
DELETED
@@ -1,24 +0,0 @@
----
-local_uri: http://localhost:5000/
-default_namespace_uri: http://default.dummy/
-rdf_connection_file: rdfstore
-ardf_log_level: 0
-standalone_db: true
-db_file: database
-namespaces:
-  hyper: http://www.hypernietzsche.org/ontology/
-  foaf: http://xmlns.com/foaf/0.1/
-  skos: http://www.w3.org/2004/02/skos/core#
-  dct: http://purl.org/dc/terms/
-  dcmit: http://purl.org/dc/dcmitype/
-  dcns: http://purl.org/dc/elements/1.1/
-  luccadom: http://trac.talia.discovery-project.eu/wiki/LuccaOntology#
-  talias: http://trac.talia.discovery-project.eu/wiki/StructuralOntology#
-  taliadom: http://trac.talia.discovery-project.eu/wiki/SampleDomainOntology#
-  sesame: http://www.openrdf.org/schema/sesame#
-  swicky: http://discovery-project.eu/ontologies/philoSpace/
-  discovery: http://discovery-project.eu/ontologies/scholar/0.1/
-site_name: Talia Test
-iip_server_uri: http://localhost:80/fcgi-bin/iipsrv.fcgi
-vips_command: /opt/local/vips
-convert_command: /usr/local/bin/convert
data/lib/JXslt/jxslt.rb
DELETED
@@ -1,60 +0,0 @@
-include Java if jruby?
-module JXslt
-  Dir["#{RAILS_ROOT}/lib/saxon*.jar"].each { |jar| require jar }
-  include_class "javax.xml.transform.TransformerFactory"
-  include_class "javax.xml.transform.Transformer"
-  include_class "javax.xml.transform.stream.StreamSource"
-  include_class "javax.xml.transform.stream.StreamResult"
-  include_class "java.lang.System"
-
-  class XsltProcessor
-
-    puts $CLASSPATH
-
-
-    def transform(xslt, infile, outfile, options)
-      if options[:in] == "stream"
-        in_var = StreamSource.new(infile)
-      else
-        sr = java.io.StringReader.new(infile)
-        in_var = StreamSource.new(sr)
-      end
-      if options[:out] == "stream"
-        out_var = StreamResult.new(outfile)
-      else
-        sw = java.io.StringWriter.new()
-        out_var = StreamResult.new(sw)
-      end
-      if options[:xslt] == "stream"
-        xslt_var = StreamSource.new(xslt)
-      else
-        sxs = java.io.StringReader.new(xslt)
-        xslt_var = StreamSource.new(sxs)
-      end
-      transformer = @tf.newTransformer(xslt_var)
-      unless options[:transformer_parameters].nil?
-        options[:transformer_parameters].each do |key, value|
-          transformer.setParameter(key, java.lang.String.new(value))
-        end
-      end
-      transformer.transform(in_var, out_var)
-      if options[:out] != "stream"
-        outfile = sw.toString()
-      end
-    end
-  end # XsltProcessor
-  class Saxon < XsltProcessor
-    TRANSFORMER_FACTORY_IMPL = "net.sf.saxon.TransformerFactoryImpl"
-    def initialize
-      System.setProperty("javax.xml.transform.TransformerFactory", TRANSFORMER_FACTORY_IMPL)
-      @tf = TransformerFactory.newInstance
-    end
-  end
-  class Xalan < XsltProcessor
-    TRANSFORMER_FACTORY_IMPL = "org.apache.xalan.processor.TransformerFactoryImpl"
-    def initialize
-      System.setProperty("javax.xml.transform.TransformerFactory", TRANSFORMER_FACTORY_IMPL)
-      @tf = TransformerFactory.newInstance
-    end
-  end
-end if jruby?
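Note on the removal above: the JXslt wrapper was only loaded under JRuby and was driven through the option hash checked in XsltProcessor#transform. A minimal usage sketch, reconstructed from that signature (the file names and the parameter value below are made-up illustration values, not part of the gem; the file list above shows no direct replacement in 0.7.0):

  # Hedged sketch only -- file names and the 'param' value are hypothetical.
  processor = JXslt::Saxon.new
  processor.transform('transform.xsl', 'input.xml', 'output.xml',
                      :xslt => 'stream', :in => 'stream', :out => 'stream',
                      :transformer_parameters => { 'param' => 'value' })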
data/lib/swicky/json_encoder.rb
DELETED
@@ -1,179 +0,0 @@
-module Swicky
-
-  # Takes a number of triples and encodes them in the SIMILE JSON format
-  class JsonEncoder
-
-    def initialize(triples)
-      @triples = triples
-      @types_hash = {}
-      @properties_hash = {}
-      @label_hash = {}
-      @label_inverse = {}
-    end
-
-    def to_json
-      puts @triples.inspect
-      @items ||= begin
-        items = []
-        # First a round to make sure that each item has a label
-        triple_hash.each { |object, values| values['label'] = make_label!(object, values) }
-        # Now build the items themselves
-        triple_hash.each { |object, values| items += build_item(object, values) }
-        items
-      end
-      # hashy = { 'items' => @items, 'types' => @types_hash, 'properties' => @properties_hash }
-      # puts hashy.inspect
-      { 'items' => @items, 'types' => @types_hash, 'properties' => @properties_hash }.to_json
-    end
-
-    private
-
-    def triple_hash
-      @triple_hash ||= begin
-        triple_hash = {}
-        # Sort all the triples into a hash, mapping all predicates
-        # to a single subject entry and all objects into a single
-        # predicate entry for each subject or predicate
-        @triples.each do |triple|
-          subject = triple.shift.to_uri
-          triple_hash[subject] ||= {}
-          predicate = triple.shift.to_uri
-          triple_hash[subject][predicate] ||= []
-          triple_hash[subject][predicate] << triple.first
-        end
-        triple_hash
-      end
-    end
-
-    # Builds the entry for one "item". This alwasys returns an array, as there may
-    # be additional items created as placeholders for label references
-    def build_item(object, values)
-      items = []
-      item = {}
-      item['uri'] = object.to_s
-
-      item['type'] = make_types!(values)
-      item['label'] = values.delete('label')
-
-      # Add the normal predicates
-      values.each do |predicate, objects|
-        predicate_local = make_predicate_local(predicate)
-        resource_labels, additional_items = as_labels(objects)
-        item[predicate_local] = resource_labels
-        items += additional_items
-      end
-
-      items << item
-      items
-    end
-
-    # Turns the given resources into label references
-    def as_labels(resources)
-      additional_items = []
-      labels = resources.collect do |res|
-        label, additional = resource_label(res)
-        additional_items << additional if(additional)
-        label
-      end
-      labels.uniq!
-      [(labels.size == 1) ? labels.first : labels, additional_items]
-    end
-
-    def resource_label(resource)
-      return resource if(resource.is_a?(String))
-      return @label_inverse[resource.to_s] if(@label_inverse[resource.to_s])
-      label = make_label!(resource, {})
-      [label, {'uri' => resource.to_s, 'label' => label}]
-    end
-
-    # Check the type definitions from the given hash and transform them
-    # to "type" entries in the JSON structure
-    def make_types!(hash)
-      # First, check for the types
-      types = hash.delete(get_key_from(N::RDF.type, hash)) || []
-      # Add a default type if we don't have one
-      if(types.empty?)
-        types = [ N::RDF.Resource ]
-      end
-      # All types will be referred to by their local name
-      types.collect! { |type| make_type_local(type) }
-      types.uniq!
-      types
-    end
-
-
-    # Create the label: Either use the RDFS label or the last part of th
-    # uri. This also inserts the label into an "inverse" hash so that
-    # labels can be looked up by the uri
-    def make_label!(uri, hash)
-      return @label_inverse[uri.to_s] if(@label_inverse[uri.to_s]) # label already exists
-      label = hash.delete(get_key_from(N::RDFS.label, hash)) || []
-      if(label.empty?)
-        label = check_label(label_for(uri), uri)
-      else
-        label = check_label(label.first, uri)
-      end
-
-      label.to_s
-    end
-
-    # Get the part of the the uri that can be used as a label
-    def label_for(uri)
-      /[\/#]?([^\/#]+)\Z/.match(uri.to_s)[1]
-    end
-
-    # Check if the given label can be used, and adapt it if necessary
-    def check_label(label, uri)
-      label = first_free(label, uri.to_s, @label_hash)
-      @label_inverse[uri.to_s] ||= label
-      label
-    end
-
-    # Little kludge for getting badly-specified keys
-    def get_key_from(key, hash)
-      hash.keys.find { |k| k.to_s == key.to_s }
-    end
-
-    # Finds the first "free" element in the hash. This checks
-    # if hash[initial_value] is empty or equal to "value", if that is not the
-    # case if will try "initial_value2", "initial_value3", ... until the
-    # condition is fulfilled
-    def first_free(initial_key, value, hash)
-      candidate = initial_key
-      hash[candidate] ||= value
-      count = 1
-      while(hash[candidate] != value)
-        count += 1
-        candidate = "#{initial_key}#{count}"
-        hash[candidate] ||= value
-      end
-      candidate
-    end
-
-    # Create the local name for the predicate, and add the definition
-    # to the "properties" hash if necessary. This will also attempt to
-    # avoid collisions if some predicates map to the same local name
-    #
-    # TODO: Doesn't force RDF:label etc to map to the "correct" local
-    # name in case of collisions
-    def make_predicate_local(predicate)
-      first_free(predicate.to_uri.local_name, { 'uri' => predicate.to_s, 'valueType' => 'item' }, @properties_hash)
-    end
-
-    # Making local for types
-    def make_type_local(type)
-      first_free(type.to_uri.local_name, { 'uri' => type.to_s }, @types_hash)
-    end
-
-
-    # The entry for the "properties" hash for the given predicate
-    def predicate_property(predicate_uri)
-      {
-        "uri" => predicate_url,
-        "valueType" => "item"
-      }
-    end
-
-  end
-
-end
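For comparison with the new swicky/exhibit_json/item.rb and item_collection.rb listed above: the removed encoder serialized a hash of the shape sketched below. This is a hedged reconstruction from to_json, build_item and make_types! in the deleted code; the concrete URIs, labels and the 'creator' key are invented illustration values.

  require 'json'

  # Illustrative values only -- the URIs, labels and property names are made up.
  exhibit_data = {
    'items' => [
      { 'uri'   => 'http://example.org/note/1',
        'type'  => ['Resource'],
        'label' => 'note1' }
    ],
    'types'      => { 'Resource' => { 'uri' => 'http://www.w3.org/1999/02/22-rdf-syntax-ns#Resource' } },
    'properties' => { 'creator'  => { 'uri' => 'http://purl.org/dc/elements/1.1/creator',
                                      'valueType' => 'item' } }
  }
  puts JSON.generate(exhibit_data)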
data/lib/talia_core/agent.rb
DELETED
@@ -1,14 +0,0 @@
-module TaliaCore
-
-  # Some item that "has the power to act". This can either be a person or another
-  # entity, like an institution or a corporation
-  class Agent < Source
-
-    has_rdf_type N::DCT.Agent
-
-    singular_property :name, N::DCNS.title
-    singular_property :description, N::DCNS.description
-
-  end
-
-end
data/lib/talia_core/background_jobs/job.rb
DELETED
@@ -1,82 +0,0 @@
-module TaliaCore
-  module BackgroundJobs
-
-    # Exception class for blocked jobs
-    class JobBlockedError < RuntimeError
-    end
-
-    # A Background job run from Talia. The Job class is designed to be able to run long-running
-    # tasks both from the command line and Talia's background job runner.
-    class Job
-
-      # Creates a background job with progress metering. If a tag is given, it will attempt to block
-      # the creation of further jobs with the same tag. This will also use the runner script, called
-      # with the current ruby binary
-      def self.submit_with_progress(jobs, options = {})
-        # add the runner script and ruby call to the jobs
-        jobs = make_jobs(jobs)
-        Bj.submit(jobs, options) do |job|
-          if(tag = job.tag)
-            tagged = Bj.table.job.find(:all, :conditions => ["(state != 'finished' and state != 'dead' and tag = ?)", tag])
-            # The error will break the transation and leave the db in a clean state
-            raise(JobBlockedError, "Tried to create another job with tag #{tag}.") unless(tagged.size == 1)
-          end
-          # Update the environment so the runner can find the job id
-          job.env['JOB_ID'] ||= job.id.to_s
-          job.save!
-          job
-        end
-      end
-
-      # Runs the block with an active progress meter, creating the progress object before starting, and
-      # deleting it from the db after completion. This way the progress_jobs table should remain mostly
-      # clean.
-      def self.run_progress_job
-        job_id = ENV['JOB_ID']
-        raise(RuntimeError, "Cannot run job: Job id not given or non-existent (#{job_id})") unless(job_id && Bj.table.job.exists?(job_id))
-        ProgressJob.create_progress!(job_id) unless(ProgressJob.exists?(:job_id => job_id))
-        yield
-      ensure
-        job_id = ENV['JOB_ID']
-        ProgressJob.delete(:job_id => job_id)
-      end
-
-      # Runs the block with the progress meter for the current job. This may be used multiple times.
-      def self.run_with_progress(message, item_count)
-        # Wrap this in the progress job call. This way it will work fine standalone
-        run_progress_job do
-          job_id = ENV['JOB_ID']
-          # Create the progress meter
-          progress = ProgressJob.find(:first, :conditions => {:job_id => job_id})
-          raise(RuntimeError, 'Progress meter not found for job.') unless progress
-          progress.update_attributes(:item_count => item_count, :progress_message => message, :processed_count => 0, :started_at => Time.now)
-
-          yield(progress)
-
-          progress.finish
-        end
-      end
-
-      private
-
-      def self.ruby
-        return @ruby if(@ruby)
-        c = ::Config::CONFIG
-        ruby = File.join(c['bindir'], c['ruby_install_name']) << c['EXEEXT']
-        @ruby = if system('%s -e 42' % ruby)
-          ruby
-        else
-          system('%s -e 42' % 'ruby') ? 'ruby' : warn('no ruby in PATH/CONFIG')
-        end
-      end
-
-      # Make the caller line for each job
-      def self.make_jobs(jobs)
-        jobs = [ jobs ] unless(jobs.kind_of?(Array))
-        jobs.collect { |current_job| "#{ruby} #{File.join('.', 'script', 'runner')} #{File.join('jobs', current_job)}" }
-      end
-
-    end
-
-  end
-end
data/lib/talia_core/background_jobs/progress_job.rb
DELETED
@@ -1,68 +0,0 @@
-module TaliaCore
-  module BackgroundJobs
-
-    # Helper table to track the current status of a long-running task
-    class ProgressJob < ActiveRecord::Base
-
-      # Minimum interval for database updates.
-      DB_UPDATE_INTERVAL = 2
-
-      validates_numericality_of :job_id, :only_integer => true
-      validates_numericality_of :processed_count, :only_integer => true, :greater_than_or_equal => 0
-      validates_numericality_of :item_count, :only_integer => true, :greater_than => 0
-
-      # Create a new progress job
-      def self.create_progress!(job_id, message = '', item_count = 1)
-        job_prog = new(:job_id => job_id, :progress_message => message, :item_count => item_count, :processed_count => 0)
-        job_prog.save!
-        job_prog
-      end
-
-      # Clears the progress for processes no longer active
-      def self.clear
-        find(:all).each do |job_prog|
-          delete(job_prog.id) if(!Bg.table.job.exists?(job_id) || Bg.tablejob.find(job_id).finished?)
-        end
-      end
-
-      # Increments the number of processed items. To avoid flooding the db, the same object will
-      # only save this value at most all DB_UPDATE_INTERVAL seconds. Will return true if the
-      # element was saved, false otherwise.
-      def inc(inc_value = 1)
-        pc_old = self.processed_count
-        self.processed_count = pc_old + inc_value
-        if(!@last_update || ((Time.now - @last_update) > DB_UPDATE_INTERVAL))
-          save!
-          @last_update = Time.now
-          true
-        else
-          false
-        end
-      end
-
-      def finish
-        self.processed_count = self.item_count
-        save!
-      end
-
-      # The percentage completed
-      def percentage
-        [((self.processed_count * 100) / self.item_count), 100].min
-      end
-
-      # Elapsed time in seconds
-      def elapsed
-        return unless(self.started_at)
-        Time.now - started_at
-      end
-
-      # Returns the estimated time remaining on the current job
-      def eta
-        ((elapsed * 100) / percentage) - elapsed
-      end
-
-
-    end
-
-  end
-end
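The two BackgroundJobs classes above were removed together with the bj migration template (see the file list). For reference, a job script typically drove the progress API roughly as sketched below; this is a hedged reconstruction from run_with_progress and ProgressJob#inc above, and the work items and per-item step are hypothetical. It also assumes the Bj setup and JOB_ID environment that the deleted classes relied on.

  items = %w[a b c]  # hypothetical work items
  TaliaCore::BackgroundJobs::Job.run_with_progress('Importing sources', items.size) do |progress|
    items.each do |item|
      # ... per-item work would go here ...
      progress.inc  # persisted at most once every DB_UPDATE_INTERVAL seconds
    end
  end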