dor-services 5.1.1 → 5.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +8 -8
- data/lib/dor-services.rb +1 -2
- data/lib/dor/config.rb +5 -6
- data/lib/dor/datastreams/content_metadata_ds.rb +17 -20
- data/lib/dor/datastreams/datastream_spec_solrizer.rb +1 -1
- data/lib/dor/datastreams/desc_metadata_ds.rb +7 -7
- data/lib/dor/datastreams/embargo_metadata_ds.rb +2 -7
- data/lib/dor/datastreams/events_ds.rb +9 -9
- data/lib/dor/datastreams/identity_metadata_ds.rb +29 -34
- data/lib/dor/datastreams/rights_metadata_ds.rb +1 -1
- data/lib/dor/datastreams/role_metadata_ds.rb +0 -1
- data/lib/dor/datastreams/simple_dublin_core_ds.rb +12 -13
- data/lib/dor/datastreams/version_metadata_ds.rb +11 -15
- data/lib/dor/datastreams/workflow_definition_ds.rb +18 -22
- data/lib/dor/datastreams/workflow_ds.rb +24 -36
- data/lib/dor/migrations/identifiable/assert_adminPolicy.rb +1 -1
- data/lib/dor/migrations/identifiable/fix_model_assertions.rb +1 -1
- data/lib/dor/migrations/identifiable/record_remediation.rb +2 -2
- data/lib/dor/migrations/identifiable/uriify_augmented_contentlocation_refs.rb +1 -1
- data/lib/dor/migrations/identifiable/uriify_contentlocation_refs.rb +1 -1
- data/lib/dor/migrations/processable/unify_workflows.rb +4 -4
- data/lib/dor/migrations/versionable/add_missing_version_md.rb +2 -2
- data/lib/dor/models/assembleable.rb +2 -2
- data/lib/dor/models/collection.rb +1 -0
- data/lib/dor/models/contentable.rb +3 -3
- data/lib/dor/models/describable.rb +16 -13
- data/lib/dor/models/editable.rb +3 -3
- data/lib/dor/models/embargoable.rb +2 -2
- data/lib/dor/models/eventable.rb +2 -2
- data/lib/dor/models/geoable.rb +14 -18
- data/lib/dor/models/governable.rb +1 -1
- data/lib/dor/models/identifiable.rb +36 -57
- data/lib/dor/models/itemizable.rb +6 -6
- data/lib/dor/models/presentable.rb +12 -12
- data/lib/dor/models/preservable.rb +2 -5
- data/lib/dor/models/processable.rb +19 -25
- data/lib/dor/models/publishable.rb +2 -2
- data/lib/dor/models/releaseable.rb +165 -212
- data/lib/dor/models/shelvable.rb +10 -14
- data/lib/dor/models/upgradable.rb +11 -11
- data/lib/dor/models/versionable.rb +16 -21
- data/lib/dor/models/workflow_object.rb +3 -3
- data/lib/dor/services/cleanup_reset_service.rb +32 -27
- data/lib/dor/services/digital_stacks_service.rb +3 -3
- data/lib/dor/services/merge_service.rb +4 -8
- data/lib/dor/services/metadata_handlers/catalog_handler.rb +1 -1
- data/lib/dor/services/metadata_handlers/mdtoolkit_handler.rb +4 -6
- data/lib/dor/services/metadata_service.rb +20 -22
- data/lib/dor/services/registration_service.rb +6 -8
- data/lib/dor/services/reset_workspace_service.rb +14 -16
- data/lib/dor/services/sdr_ingest_service.rb +2 -6
- data/lib/dor/services/search_service.rb +3 -3
- data/lib/dor/services/suri_service.rb +2 -3
- data/lib/dor/services/technical_metadata_service.rb +2 -3
- data/lib/dor/utils/ng_tidy.rb +6 -6
- data/lib/dor/utils/predicate_patch.rb +1 -1
- data/lib/dor/utils/solr_doc_helper.rb +2 -2
- data/lib/dor/version.rb +1 -1
- data/lib/dor/workflow/document.rb +27 -33
- data/lib/dor/workflow/graph.rb +34 -37
- data/lib/dor/workflow/process.rb +8 -8
- data/lib/tasks/rdoc.rake +5 -5
- metadata +4 -11
- data/bin/dor-indexer +0 -108
- data/bin/dor-indexerd +0 -73
- data/config/certs/robots-dor-dev.crt +0 -29
- data/config/certs/robots-dor-dev.key +0 -27
- data/config/dev_console_env.rb +0 -78
data/lib/dor/services/registration_service.rb
CHANGED
@@ -39,7 +39,7 @@ module Dor
       rights=nil
       if params[:rights]
         rights=params[:rights]
-        unless …
+        unless %w(world stanford dark default none).include? rights
           raise Dor::ParameterError, "Unknown rights setting '#{rights}' when calling #{self.name}.register_object"
         end
       end
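The new check validates the rights keyword against a %w whitelist. A minimal standalone sketch of the same validation pattern (constant and method names here are illustrative, not the gem's API):

    ALLOWED_RIGHTS = %w(world stanford dark default none).freeze

    # Reject unknown rights keywords before any registration work happens.
    def validate_rights!(rights)
      unless ALLOWED_RIGHTS.include?(rights)
        raise ArgumentError, "Unknown rights setting '#{rights}'"
      end
      rights
    end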
@@ -52,7 +52,7 @@ module Dor
         end
       end

-      if (other_ids. …
+      if (other_ids.key?(:uuid) || other_ids.key?('uuid')) == false
         other_ids[:uuid] = UUIDTools::UUID.timestamp_create.to_s
       end
       short_label = label.length>254 ? label[0,254] : label
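The uuid guard now uses Hash#key? and checks both the symbol and the string form of the key. A small illustrative sketch of that pattern (helper name assumed, not from the gem):

    require 'uuidtools'

    # Assign a timestamp UUID only when neither :uuid nor 'uuid' is already present.
    def ensure_uuid(other_ids)
      unless other_ids.key?(:uuid) || other_ids.key?('uuid')
        other_ids[:uuid] = UUIDTools::UUID.timestamp_create.to_s
      end
      other_ids
    end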
@@ -75,21 +75,19 @@ module Dor
         short_predicate = ActiveFedora::RelsExtDatastream.short_predicate rel.namespace.href+rel.name
         if short_predicate.nil?
           ix = 0
-          ix += 1 while ActiveFedora::Predicates.predicate_mappings[rel.namespace.href]. …
+          ix += 1 while ActiveFedora::Predicates.predicate_mappings[rel.namespace.href].key?(short_predicate = :"extra_predicate_#{ix}")
           ActiveFedora::Predicates.predicate_mappings[rel.namespace.href][short_predicate] = rel.name
         end
         new_item.add_relationship short_predicate, rel['rdf:resource']
       end
-      if collection
-        …
-      end
-      if (rights && ['item','collection'].include?(object_type))
+      new_item.add_collection(collection) if collection
+      if rights && %w(item collection).include?(object_type)
         rights_xml=apo_object.defaultObjectRights.ng_xml
         new_item.datastreams['rightsMetadata'].content=rights_xml.to_s
         new_item.set_read_rights(rights) unless rights == 'default' # already defaulted to default!
       end
       #create basic mods from the label
-      if(metadata_source=='label')
+      if (metadata_source=='label')
         ds=new_item.build_datastream('descMetadata');
         builder = Nokogiri::XML::Builder.new { |xml|
           xml.mods( 'xmlns' => 'http://www.loc.gov/mods/v3', 'xmlns:xsi' => 'http://www.w3.org/2001/XMLSchema-instance',:version => '3.3', "xsi:schemaLocation" => 'http://www.loc.gov/mods/v3 http://www.loc.gov/standards/mods/v3/mods-3-3.xsd'){
data/lib/dor/services/reset_workspace_service.rb
CHANGED
@@ -4,32 +4,30 @@ module Dor
   class ResetWorkspaceService

     def self.reset_workspace_druid_tree(druid, version, workspace_root)
-
+
       druid_tree_path = DruidTools::Druid.new(druid, workspace_root).pathname.to_s
-
-      raise "The archived directory #{druid_tree_path}_v#{version} already existed." if File.exists?("#{druid_tree_path}_v#{version}")
-
-      if File.exists?(druid_tree_path)
+
+      raise "The archived directory #{druid_tree_path}_v#{version} already existed." if File.exists?("#{druid_tree_path}_v#{version}")
+
+      if File.exists?(druid_tree_path)
         FileUtils.mv(druid_tree_path, "#{druid_tree_path}_v#{version}")
       end #Else is a truncated tree where we shouldn't do anything

     end

     def self.reset_export_bag(druid, version, export_root)
-
+
       id = druid.split(':').last
       bag_dir = File.join(export_root, id)

-      raise "The archived bag #{bag_dir}_v#{version} already existed." if File.exists?("#{bag_dir}_v#{version}")
-
-      if File.exists?(bag_dir)
-        …
-        …
-        …
-      if File.exists?("#{bag_dir}.tar")
+      raise "The archived bag #{bag_dir}_v#{version} already existed." if File.exists?("#{bag_dir}_v#{version}")
+
+      FileUtils.mv(bag_dir, "#{bag_dir}_v#{version}") if File.exists?(bag_dir)
+
+      if File.exists?("#{bag_dir}.tar")
         FileUtils.mv("#{bag_dir}.tar", "#{bag_dir}_v#{version}.tar")
-      end
+      end
     end
-
+
   end
-end
+end
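Both methods archive the previous attempt by renaming it with a _v<version> suffix before the workspace is reset, and refuse to overwrite an archive that already exists. A condensed sketch of that move-aside pattern (standalone helper, paths assumed):

    require 'fileutils'

    # Move an existing directory or file aside as "<path>_v<version>",
    # refusing to clobber an archive that is already there.
    def archive_versioned(path, version)
      archived = "#{path}_v#{version}"
      raise "#{archived} already exists" if File.exist?(archived)
      FileUtils.mv(path, archived) if File.exist?(path)
    end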
data/lib/dor/services/sdr_ingest_service.rb
CHANGED
@@ -74,7 +74,7 @@ module Dor
     # If not found, return nil unless it is a required datastream in which case raise exception
     def self.get_datastream_content(dor_item, ds_name, required)
       ds = (ds_name == 'relationshipMetadata' ? 'RELS-EXT' : ds_name)
-      if dor_item.datastreams.keys.include?(ds) && ! …
+      if dor_item.datastreams.keys.include?(ds) && !dor_item.datastreams[ds].new?
         return dor_item.datastreams[ds].content
       elsif (required == 'optional')
         return nil
@@ -137,11 +137,7 @@ module Dor
     # @return [String] Return the contents of the contentMetadata.xml file from the content directory
     def self.get_content_metadata(metadata_dir)
       content_metadata_pathname = metadata_dir.join('contentMetadata.xml')
-      if content_metadata_pathname.exist?
-        content_metadata_pathname.read
-      else
-        nil
-      end
+      content_metadata_pathname.read if content_metadata_pathname.exist?
     end

     # @param [Pathname] metadata_dir The location of the the object's metadata files
data/lib/dor/services/search_service.rb
CHANGED
@@ -74,7 +74,7 @@ module Dor
       resp = solr.find params
       if block_given?
         cont = true
-        while cont
+        while cont && resp.docs.length > 0
           cont = yield(resp)
           params[:rows] ||= resp.docs.length
           params[:start] += params[:rows]
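The paging loop now also stops when Solr returns an empty page, not only when the block returns false. A minimal sketch of the same offset-based pagination pattern against a plain RSolr client (query and row counts are made up):

    require 'rsolr'

    # Page through Solr results, yielding each page of docs until the block
    # returns false or a page comes back empty.
    def each_page(solr, query, rows: 100)
      start = 0
      loop do
        resp = solr.get('select', params: { q: query, start: start, rows: rows })
        docs = resp['response']['docs']
        break if docs.empty? || yield(docs) == false
        start += rows
      end
    end

    # solr = RSolr.connect(url: 'http://localhost:8983/solr/collection1')
    # each_page(solr, 'objectType_ssim:item') { |docs| puts docs.length }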
@@ -91,7 +91,7 @@ module Dor
       elsif id.is_a?(Array) # Two values: [ 'google', 'STANFORD_0123456789' ]
         id = id.join(':')
       end
-      q = %{#{Solrizer.solr_name 'identifier', : …
+      q = %{#{Solrizer.solr_name 'identifier', :stored_searchable}:"#{id}"}
       result = []
       resp = query(q, :fl => 'id', :rows => 1000) do |resp|
         result += resp.docs.collect { |doc| doc['id'] }
@@ -123,4 +123,4 @@ module Dor

   end

-end
+end
data/lib/dor/services/technical_metadata_service.rb
CHANGED
@@ -99,7 +99,7 @@ module Dor
     # The data is updated to the latest format.
     def self.get_dor_technical_metadata(dor_item)
       ds = "technicalMetadata"
-      if dor_item.datastreams.keys.include?(ds)
+      if dor_item.datastreams.keys.include?(ds) && !dor_item.datastreams[ds].new?
         dor_techmd = dor_item.datastreams[ds].content
       else
         return nil
@@ -127,7 +127,7 @@ module Dor
     # @param [Array<String>] new_files The list of filenames for files that are either added or modifed since the previous version
     # @return [String] The technicalMetadata datastream for the new files of the new digital object version
     def self.get_new_technical_metadata(druid, new_files)
-      return nil if new_files.nil?
+      return nil if new_files.nil? || new_files.empty?
       workspace = DruidTools::Druid.new(druid, Dor::Config.sdr.local_workspace_root)
       content_dir = workspace.find_filelist_parent('content',new_files)
       temp_dir = workspace.temp_dir
@@ -223,4 +223,3 @@ module Dor
   end

 end
-
data/lib/dor/utils/ng_tidy.rb
CHANGED
@@ -1,9 +1,9 @@
 class Nokogiri::XML::Text
-
+
   def normalize
     self.content =~ /\S/ ? self.content.gsub(/\s+/,' ').strip : self.content
   end
-
+
   def normalize!
     self.content = self.normalize
   end
@@ -15,11 +15,11 @@ class Nokogiri::XML::Node
   def normalize_text!
     self.xpath('//text()').each { |t| t.normalize! }
   end
-
+
 end

 class Nokogiri::XML::Document
-
+
   def prettify
     xslt = Nokogiri::XSLT <<-EOC
       <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
@@ -33,5 +33,5 @@ class Nokogiri::XML::Document
     EOC
     xslt.transform(self).to_xml
   end
-
-end
+
+end
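These hunks are whitespace-only cleanup of the Nokogiri monkey patches. For context, a hedged usage sketch of the two helpers this file defines, normalize_text! and prettify (require path and sample XML assumed):

    require 'nokogiri'
    require 'dor/utils/ng_tidy'

    doc = Nokogiri::XML(%{<mods><title>  Hello   World </title></mods>})
    doc.normalize_text!   # collapse runs of whitespace inside every text node
    puts doc.prettify     # re-indent the document via the embedded XSLT stylesheet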
data/lib/dor/workflow/document.rb
CHANGED
@@ -11,7 +11,7 @@ module Workflow
       t.process {
         t.name_(:path=>{:attribute=>"name"})
         t.status(:path=>{:attribute=>"status"})
-        t.timestamp(:path=>{:attribute=>"datetime"})#, :data_type => :date)
+        t.timestamp(:path=>{:attribute=>"datetime"}) #, :data_type => :date)
         t.elapsed(:path=>{:attribute=>"elapsed"})
         t.lifecycle(:path=>{:attribute=>"lifecycle"})
         t.attempts(:path=>{:attribute=>"attempts"}, :index_as => [:not_searchable])
@@ -39,13 +39,13 @@ module Workflow

     def definition
       @definition ||= begin
-        if @@definitions. …
+        if @@definitions.key? self.workflowId.first
           @@definitions[self.workflowId.first]
         else
-          …
-          …
-          …
-          …
+          wfo = Dor::WorkflowObject.find_by_name(self.workflowId.first)
+          wf_def=wfo ? wfo.definition : nil
+          @@definitions[self.workflowId.first] = wf_def
+          wf_def
         end
       end
     end
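The lookup memoizes the parsed definition per document instance with ||= and also caches it per workflow name in the class-level @@definitions hash, so later documents for the same workflow skip the WorkflowObject fetch. A generic sketch of that two-level cache (method and attribute names are illustrative):

    class Document
      @@definitions = {}

      # Instance memoization backed by a class-level cache keyed by workflow name.
      def definition
        @definition ||= (@@definitions[workflow_name] ||= fetch_definition(workflow_name))
      end

      private

      # Stand-in for the expensive lookup (Dor::WorkflowObject.find_by_name in the gem).
      def fetch_definition(name)
        obj = Dor::WorkflowObject.find_by_name(name)
        obj ? obj.definition : nil
      end
    end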
@@ -53,9 +53,7 @@ module Workflow
     def graph(parent=nil, dir=nil)
       wf_definition = self.definition
       result = wf_definition ? Workflow::Graph.from_processes(wf_definition.repo, wf_definition.name, self.processes, parent) : nil
-      unless result.nil?
-        result['rankdir'] = dir || 'TB'
-      end
+      result['rankdir'] = dir || 'TB' unless result.nil?
       result
     end

@@ -65,9 +63,7 @@ module Workflow

     def processes
       #if the workflow service didnt return any processes, dont return any processes from the reified wf
-      if ng_xml.search("/workflow/process").length == 0
-        return []
-      end
+      return [] if ng_xml.search("/workflow/process").length == 0
       @processes ||=
         if self.definition
           self.definition.processes.collect do |process|
@@ -93,7 +89,7 @@ module Workflow
       repo = self.repository.first
       wf_solr_type = :string
       wf_solr_attrs = [:symbol]
-      add_solr_value(solr_doc, 'wf', …
+      add_solr_value(solr_doc, 'wf', wf_name, wf_solr_type, wf_solr_attrs)
       add_solr_value(solr_doc, 'wf_wps', wf_name, wf_solr_type, wf_solr_attrs)
       add_solr_value(solr_doc, 'wf_wsp', wf_name, wf_solr_type, wf_solr_attrs)
       status = processes.empty? ? 'empty' : (workflow_should_show_completed?(processes) ? 'completed' : 'active')
@@ -101,27 +97,25 @@ module Workflow
       add_solr_value(solr_doc, 'workflow_status', [wf_name,status,errors,repo].join('|'), wf_solr_type, wf_solr_attrs)

       processes.each do |process|
-        …
-        …
-        …
-        …
-        end
-        add_solr_value(solr_doc, 'wf_error', "#{wf_name}:#{process.name}:#{process.error_message}", wf_solr_type, wf_solr_attrs) if process.error_message #index the error message without the druid so we hopefully get some overlap
-        add_solr_value(solr_doc, 'wf_wsp', "#{wf_name}:#{process.status}", wf_solr_type, wf_solr_attrs)
-        add_solr_value(solr_doc, 'wf_wsp', "#{wf_name}:#{process.status}:#{process.name}", wf_solr_type, wf_solr_attrs)
-        add_solr_value(solr_doc, 'wf_wps', "#{wf_name}:#{process.name}", wf_solr_type, wf_solr_attrs)
-        add_solr_value(solr_doc, 'wf_wps', "#{wf_name}:#{process.name}:#{process.status}", wf_solr_type, wf_solr_attrs)
-        add_solr_value(solr_doc, 'wf_swp', "#{process.status}", wf_solr_type, wf_solr_attrs)
-        add_solr_value(solr_doc, 'wf_swp', "#{process.status}:#{wf_name}", wf_solr_type, wf_solr_attrs)
-        add_solr_value(solr_doc, 'wf_swp', "#{process.status}:#{wf_name}:#{process.name}", wf_solr_type, wf_solr_attrs)
-        if process.state != process.status
-          add_solr_value(solr_doc, 'wf_wsp', "#{wf_name}:#{process.state}:#{process.name}", wf_solr_type, wf_solr_attrs)
-          add_solr_value(solr_doc, 'wf_wps', "#{wf_name}:#{process.name}:#{process.state}", wf_solr_type, wf_solr_attrs)
-          add_solr_value(solr_doc, 'wf_swp', "#{process.state}", wf_solr_type, wf_solr_attrs)
-          add_solr_value(solr_doc, 'wf_swp', "#{process.state}:#{wf_name}", wf_solr_type, wf_solr_attrs)
-          add_solr_value(solr_doc, 'wf_swp', "#{process.state}:#{wf_name}:#{process.name}", wf_solr_type, wf_solr_attrs)
-        end
+        next unless process.status.present?
+        #add a record of the robot having operated on this item, so we can track robot activity
+        if process.date_time && process.status && (process.status == 'completed' || process.status == 'error')
+          solr_doc["wf_#{wf_name}_#{process.name}_dttsi"] = "#{process.date_time}Z"
         end
+        add_solr_value(solr_doc, 'wf_error', "#{wf_name}:#{process.name}:#{process.error_message}", wf_solr_type, wf_solr_attrs) if process.error_message #index the error message without the druid so we hopefully get some overlap
+        add_solr_value(solr_doc, 'wf_wsp', "#{wf_name}:#{process.status}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_wsp', "#{wf_name}:#{process.status}:#{process.name}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_wps', "#{wf_name}:#{process.name}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_wps', "#{wf_name}:#{process.name}:#{process.status}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_swp', "#{process.status}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_swp', "#{process.status}:#{wf_name}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_swp', "#{process.status}:#{wf_name}:#{process.name}", wf_solr_type, wf_solr_attrs)
+        next unless process.state != process.status
+        add_solr_value(solr_doc, 'wf_wsp', "#{wf_name}:#{process.state}:#{process.name}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_wps', "#{wf_name}:#{process.name}:#{process.state}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_swp', "#{process.state}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_swp', "#{process.state}:#{wf_name}", wf_solr_type, wf_solr_attrs)
+        add_solr_value(solr_doc, 'wf_swp', "#{process.state}:#{wf_name}:#{process.name}", wf_solr_type, wf_solr_attrs)
       end

       solr_doc[Solrizer.solr_name('wf_wps', :symbol)].uniq! if solr_doc[Solrizer.solr_name('wf_wps', :symbol)]
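The reworked loop skips processes without a status via next unless process.status.present?, records a per-step timestamp in a dynamic wf_<workflow>_<step>_dttsi field for completed or errored steps, and then adds the colon-joined wf_wsp / wf_wps / wf_swp facet values. A toy illustration of those composite values for a single process (workflow and step names are made up):

    wf_name = 'accessionWF'
    process = Struct.new(:name, :status).new('descriptive-metadata', 'completed')

    composites = {
      'wf_wsp' => ["#{wf_name}:#{process.status}",
                   "#{wf_name}:#{process.status}:#{process.name}"],
      'wf_wps' => ["#{wf_name}:#{process.name}",
                   "#{wf_name}:#{process.name}:#{process.status}"],
      'wf_swp' => ["#{process.status}",
                   "#{process.status}:#{wf_name}",
                   "#{process.status}:#{wf_name}:#{process.name}"]
    }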
data/lib/dor/workflow/graph.rb
CHANGED
@@ -3,36 +3,35 @@ require 'graphviz'
 module Dor
   module Workflow
     class Graph
-
+
       FILL_COLORS = { 'waiting' => "white", 'ready' => "white", 'error' => "#8B0000", 'blocked' => "white", 'completed' => "darkgreen", 'unknown' => "#CFCFCF" }
       TEXT_COLORS = { 'waiting' => "black", 'ready' => "black", 'error' => "white", 'blocked' => "#8B0000", 'completed' => "white", 'unknown' => "black" }
       PATTERNS = { 'waiting' => "diagonals", 'ready' => "filled", 'error' => "filled", 'blocked' => "diagonals", 'completed' => "filled", 'unknown' => "filled" }
-      RESERVED_KEYS = …
+      RESERVED_KEYS = %w(repository name)

      attr_reader :repo, :name, :processes, :graph, :root
-
+
      def self.from_config(name, config, parent = nil)
        wf = self.new(config['repository'], name, parent)
        config.keys.each { |p| wf.add_process(p.to_s) unless RESERVED_KEYS.include?(p) }
        config.keys.each { |p|
-          …
-          …
-          …
-          …
-          …
-          …
-          …
-          …
-          …
-          end
+          next unless wf.processes[p]
+          Array(config[p]['prerequisite']).each { |prereq|
+            prereq.sub!(/^#{config['repository']}:#{name}:/e,'')
+            if wf.processes[prereq]
+              wf.processes[p].depends_on(wf.processes[prereq])
+            else
+              wf.processes[p].depends_on(wf.add_process(prereq).set_status('external'))
+            end
+          }
        }
        wf.finish
        return wf
      end
-
+
      def self.from_processes(repo, name, processes, parent = nil)
        wf = self.new(repo, name, parent)
-        processes.each { |p|
+        processes.each { |p|
          wf.add_process(p.name).status = p.state || 'unknown'
        }
        processes.each { |p|
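from_config walks a workflow configuration hash, ignores the reserved repository/name keys, and wires every process to its prerequisites, adding a greyed-out 'external' node when a prerequisite belongs to another workflow. A rough sketch of the kind of hash it expects (keys inferred from the hunk, values invented):

    config = {
      'repository'           => 'dor',
      'start-accession'      => {},
      'descriptive-metadata' => { 'prerequisite' => ['start-accession'] },
      'publish'              => { 'prerequisite' => ['descriptive-metadata',
                                                     'dor:assemblyWF:jp2-create'] } # cross-workflow => external node
    }
    graph = Dor::Workflow::Graph.from_config('accessionWF', config)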
@@ -48,7 +47,7 @@ class Graph
        wf.finish
        return wf
      end
-
+
      def initialize(repo, name, parent = nil)
        @repo = repo
        @name = name
@@ -63,22 +62,22 @@ class Graph
        @root.shape = 'plaintext'
        @processes = {}
      end
-
+
      def qname
        [@repo,@name].join(':')
      end
-
+
      def add_process(name, external = false)
        pqname = name.split(/:/).length == 3 ? name : [qname,name].join(':')
        p = Process.new(self, pqname, name)
        @processes[name] = p
        return p
      end
-
+
      def finish
        @processes.values.each do |process|
          process.node.fontname = 'Helvetica'
-          if process.id =~ %r{^#{qname}}
+          if process.id =~ %r{^#{qname}} && process.prerequisites.length == 0
            (@root << process.node)[:arrowhead => 'none', :arrowtail => 'none', :dir => 'both', :style => 'invisible']
          end
        end
@@ -86,11 +85,11 @@ class Graph
        @root.fontname = 'Helvetica'
        return self
      end
-
+
      def inspect
        "#{self.to_s[0..-2]} #{repo}:#{name} (#{processes.keys.join(', ')})>"
      end
-
+
      def method_missing(sym,*args)
        if @graph.respond_to?(sym)
          @graph.send(sym,*args)
@@ -98,11 +97,11 @@ class Graph
          super
        end
      end
-
+
      class Process
-
+
        attr_reader :name, :status, :node, :prerequisites
-
+
        def initialize(graph, id, name)
          @name = name
          @graph = graph
@@ -112,11 +111,11 @@ class Graph
          @prerequisites = []
          self.set_status('unknown')
        end
-
+
        def id
          @node.id
        end
-
+
        def status=(s)
          @status = s
          if s == 'external'
@@ -129,12 +128,12 @@ class Graph
            @node.style = PATTERNS[s]
          end
        end
-
+
        def set_status(s)
          self.status = s
          return self
        end
-
+
        def depends_on(*processes)
          wf1 = self.id.split(/:/)[0..1].join(':')
          processes.each { |process|
@@ -143,24 +142,22 @@ class Graph
            edge.dir = 'both'
            edge.arrowhead = 'none'
            edge.arrowtail = 'none'
-            if (wf1 != wf2)
-              edge.style = 'dashed'
-            end
+            edge.style = 'dashed' if (wf1 != wf2)
            self.prerequisites << process
          }
          return self
        end
-
+
        def same_as(process)
-          @node = process.node
+          @node = process.node
        end
-
+
        def all_prerequisites
          prerequisites.collect { |p| p.all_prerequisites + [p.name] }.flatten.uniq
        end
-
+
      end

    end
  end
-end
+end