dor-services 5.2.0 → 5.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. checksums.yaml +5 -13
  2. data/config/certs/robots-dor-dev.crt +29 -0
  3. data/config/certs/robots-dor-dev.key +27 -0
  4. data/config/config_defaults.yml +2 -0
  5. data/config/dev_console_env.rb +77 -0
  6. data/lib/dor-services.rb +31 -27
  7. data/lib/dor/config.rb +25 -19
  8. data/lib/dor/datastreams/administrative_metadata_ds.rb +19 -20
  9. data/lib/dor/datastreams/content_metadata_ds.rb +238 -177
  10. data/lib/dor/datastreams/datastream_spec_solrizer.rb +1 -1
  11. data/lib/dor/datastreams/default_object_rights_ds.rb +99 -16
  12. data/lib/dor/datastreams/desc_metadata_ds.rb +37 -34
  13. data/lib/dor/datastreams/embargo_metadata_ds.rb +16 -16
  14. data/lib/dor/datastreams/events_ds.rb +2 -2
  15. data/lib/dor/datastreams/geo_metadata_ds.rb +5 -10
  16. data/lib/dor/datastreams/identity_metadata_ds.rb +22 -22
  17. data/lib/dor/datastreams/rights_metadata_ds.rb +43 -32
  18. data/lib/dor/datastreams/role_metadata_ds.rb +5 -5
  19. data/lib/dor/datastreams/simple_dublin_core_ds.rb +13 -14
  20. data/lib/dor/datastreams/version_metadata_ds.rb +22 -23
  21. data/lib/dor/datastreams/workflow_definition_ds.rb +15 -15
  22. data/lib/dor/datastreams/workflow_ds.rb +64 -70
  23. data/lib/dor/exceptions.rb +0 -1
  24. data/lib/dor/migrations/identifiable/uriify_augmented_contentlocation_refs.rb +4 -4
  25. data/lib/dor/migrations/processable/unify_workflows.rb +1 -1
  26. data/lib/dor/models/admin_policy_object.rb +4 -4
  27. data/lib/dor/models/assembleable.rb +2 -3
  28. data/lib/dor/models/collection.rb +1 -1
  29. data/lib/dor/models/contentable.rb +113 -108
  30. data/lib/dor/models/describable.rb +136 -95
  31. data/lib/dor/models/editable.rb +205 -119
  32. data/lib/dor/models/embargoable.rb +16 -16
  33. data/lib/dor/models/eventable.rb +2 -2
  34. data/lib/dor/models/geoable.rb +3 -3
  35. data/lib/dor/models/governable.rb +25 -26
  36. data/lib/dor/models/identifiable.rb +66 -55
  37. data/lib/dor/models/item.rb +0 -1
  38. data/lib/dor/models/itemizable.rb +7 -8
  39. data/lib/dor/models/preservable.rb +7 -8
  40. data/lib/dor/models/processable.rb +76 -73
  41. data/lib/dor/models/publishable.rb +25 -30
  42. data/lib/dor/models/releaseable.rb +118 -155
  43. data/lib/dor/models/rightsable.rb +2 -3
  44. data/lib/dor/models/set.rb +1 -1
  45. data/lib/dor/models/shelvable.rb +8 -10
  46. data/lib/dor/models/upgradable.rb +5 -6
  47. data/lib/dor/models/versionable.rb +3 -4
  48. data/lib/dor/models/workflow_object.rb +15 -16
  49. data/lib/dor/services/cleanup_reset_service.rb +15 -16
  50. data/lib/dor/services/cleanup_service.rb +2 -4
  51. data/lib/dor/services/digital_stacks_service.rb +10 -13
  52. data/lib/dor/services/merge_service.rb +8 -9
  53. data/lib/dor/services/metadata_handlers/catalog_handler.rb +1 -1
  54. data/lib/dor/services/metadata_handlers/mdtoolkit_handler.rb +3 -3
  55. data/lib/dor/services/metadata_service.rb +19 -20
  56. data/lib/dor/services/registration_service.rb +80 -61
  57. data/lib/dor/services/reset_workspace_service.rb +6 -10
  58. data/lib/dor/services/sdr_ingest_service.rb +15 -16
  59. data/lib/dor/services/search_service.rb +18 -23
  60. data/lib/dor/services/suri_service.rb +6 -6
  61. data/lib/dor/services/technical_metadata_service.rb +27 -44
  62. data/lib/dor/utils/ng_tidy.rb +3 -3
  63. data/lib/dor/utils/sdr_client.rb +2 -3
  64. data/lib/dor/utils/solr_doc_helper.rb +1 -3
  65. data/lib/dor/version.rb +1 -1
  66. data/lib/dor/workflow/document.rb +43 -40
  67. data/lib/dor/workflow/graph.rb +26 -26
  68. data/lib/dor/workflow/process.rb +34 -35
  69. data/lib/tasks/rdoc.rake +5 -5
  70. metadata +129 -111
  71. data/lib/dor/models/presentable.rb +0 -146
data/lib/dor/services/search_service.rb CHANGED
@@ -2,7 +2,6 @@ require 'json'
  require 'active_support/core_ext'

  module Dor
-
  class SearchService

  RISEARCH_TEMPLATE = "select $object from <#ri> where $object <dc:identifier> '%s'"
@@ -27,19 +26,19 @@ module Dor
  client = Config.fedora.client['risearch']
  client.options[:timeout] = opts.delete(:timeout)
  query_params = {
- :type => 'tuples',
- :lang => 'itql',
+ :type => 'tuples',
+ :lang => 'itql',
  :format => 'CSV',
- :limit => '1000',
+ :limit => '1000',
  :stream => 'on',
- :query => query
+ :query => query
  }.merge(opts)
  result = client.post(query_params)
- result.split(/\n/)[1..-1].collect { |pid| pid.chomp.sub(/^info:fedora\//,'') }
+ result.split(/\n/)[1..-1].collect { |pid| pid.chomp.sub(/^info:fedora\//, '') }
  end

  def iterate_over_pids(opts = {}, &block)
- opts[:query] ||= "select $object from <#ri> where $object <info:fedora/fedora-system:def/model#label> $label"
+ opts[:query] ||= 'select $object from <#ri> where $object <info:fedora/fedora-system:def/model#label> $label'
  opts[:in_groups_of] ||= 100
  opts[:mode] ||= :single
  start = 0
@@ -58,30 +57,27 @@ module Dor
  def gsearch(params)
  client = Config.gsearch.client
  query_params = params.merge(:wt => 'json')
- query_string = query_params.collect { |k,v|
+ query_string = query_params.collect { |k, v|
  if v.is_a?(Array)
  v.collect { |vv| "#{k}=#{URI.encode(vv.to_s)}" }.join('&')
  else
  "#{k}=#{URI.encode(v.to_s)}"
  end
  }.join('&')
- result = JSON.parse(client["select?#{query_string}"].get)
+ JSON.parse(client["select?#{query_string}"].get)
  end

- def query query, args={}
+ def query(query, args = {})
  params = args.merge({ :q => query })
  params[:start] ||= 0
  resp = solr.find params
- if block_given?
- cont = true
- while cont && resp.docs.length > 0
- cont = yield(resp)
- params[:rows] ||= resp.docs.length
- params[:start] += params[:rows]
- resp = solr.find params
- end
- else
- return resp
+ return resp unless block_given?
+ cont = true
+ while cont && resp.docs.length > 0
+ cont = yield(resp)
+ params[:rows] ||= resp.docs.length
+ params[:start] += params[:rows]
+ resp = solr.find params
  end
  end

@@ -91,9 +87,9 @@ module Dor
  elsif id.is_a?(Array) # Two values: [ 'google', 'STANFORD_0123456789' ]
  id = id.join(':')
  end
- q = %{#{Solrizer.solr_name 'identifier', :stored_searchable}:"#{id}"}
+ q = %(#{Solrizer.solr_name 'identifier', :stored_searchable}:"#{id}")
  result = []
- resp = query(q, :fl => 'id', :rows => 1000) do |resp|
+ query(q, :fl => 'id', :rows => 1000) do |resp|
  result += resp.docs.collect { |doc| doc['id'] }
  true
  end
@@ -122,5 +118,4 @@ module Dor
  end

  end
-
  end
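
The reworked query method above keeps its two calling modes: without a block it returns the first Solr response, and with a block it keeps fetching pages until the block returns a falsy value. A minimal usage sketch, assuming the class-level interface used elsewhere in dor-services; the query string and :fl/:rows values are illustrative placeholders, not from this release:

  # Illustrative only: the Solr query and :fl/:rows values are placeholders.
  pids = []
  Dor::SearchService.query('dc_identifier_tesim:*', :fl => 'id', :rows => 500) do |resp|
    pids += resp.docs.collect { |doc| doc['id'] }
    true  # a truthy return fetches the next page; false/nil stops the loop
  end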
data/lib/dor/services/suri_service.rb CHANGED
@@ -6,12 +6,12 @@ module Dor
  # If Dor::Config.suri.mint_ids is set to true, then this method
  # returns Config.suri.id_namespace:id_from_suri
  # Throws an exception if there were any problems
- def self.mint_id quantity=nil
+ def self.mint_id(quantity = nil)
  want_array = quantity.is_a?(Numeric)
  quantity = 1 if quantity.nil?
  ids = []
  if Config.suri.mint_ids
- #Post with no body
+ # Post with no body
  resource = RestClient::Resource.new("#{Config.suri.url}/suri2/namespaces/#{Config.suri.id_namespace}",
  :user => Config.suri.user, :password => Config.suri.pass)
  ids = resource["identifiers?quantity=#{quantity}"].post('').chomp.split(/\n/).collect { |id| "#{Config.suri.id_namespace}:#{id.strip}" }
@@ -20,11 +20,11 @@ module Dor
  resp = Nokogiri::XML(repo.next_pid :numPIDs => quantity)
  ids = resp.xpath('/pidList/pid').collect { |node| node.text }
  end
- return want_array ? ids : ids.first
+ want_array ? ids : ids.first

- # rescue Exception => e
- # Rails.logger.error("Unable to mint id from suri: #{e.to_s}")
- # raise e
+ # rescue Exception => e
+ # Rails.logger.error("Unable to mint id from suri: #{e.to_s}")
+ # raise e
  end

  end
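
The tidied mint_id keeps its dual return type: called with no argument it returns a single identifier, called with a numeric quantity it returns an Array, which is what the want_array check above preserves. A rough sketch, with placeholder return values:

  # Placeholder druids shown for illustration only.
  Dor::SuriService.mint_id      # => "druid:ab123cd4567" (a single String)
  Dor::SuriService.mint_id(3)   # => an Array of three such identifiers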
data/lib/dor/services/technical_metadata_service.rb CHANGED
@@ -4,7 +4,6 @@ require 'jhove_service'
  require 'dor-services'

  module Dor
-
  class TechnicalMetadataService

  # @param [Dor::Item] dor_item The DOR item being processed by the technical metadata robot
@@ -25,9 +24,9 @@ module Dor
  return true
  else
  merged_nodes = merge_file_nodes(old_techmd, new_techmd, deltas)
- final_techmd = build_technical_metadata(druid,merged_nodes)
+ final_techmd = build_technical_metadata(druid, merged_nodes)
  end
- ds = dor_item.datastreams["technicalMetadata"]
+ ds = dor_item.datastreams['technicalMetadata']
  ds.dsLabel = 'Technical Metadata'
  ds.content = final_techmd
  ds.save
@@ -41,7 +40,7 @@ module Dor
  require 'jhove_service'
  rescue LoadError => e
  puts e.inspect
- raise "jhove-service dependency gem was not found. Please add it to your Gemfile and run bundle install"
+ raise 'jhove-service dependency gem was not found. Please add it to your Gemfile and run bundle install'
  end
  end
  end
@@ -51,15 +50,13 @@ module Dor
  def self.get_content_group_diff(dor_item)
  inventory_diff_xml = dor_item.get_content_diff('all')
  inventory_diff = Moab::FileInventoryDifference.parse(inventory_diff_xml)
- content_group_diff = inventory_diff.group_difference("content")
- content_group_diff
+ inventory_diff.group_difference('content')
  end

  # @param [FileGroupDifference] content_group_diff
  # @return [Hash<Symbol,Array>] Sets of filenames grouped by change type for use in performing file or metadata operations
  def self.get_file_deltas(content_group_diff)
- deltas = content_group_diff.file_deltas
- deltas
+ content_group_diff.file_deltas
  end

  # @param [Hash<Symbol,Array>] deltas Sets of filenames grouped by change type for use in performing file or metadata operations
@@ -81,36 +78,25 @@ module Dor
  # The data is updated to the latest format.
  def self.get_sdr_technical_metadata(druid)
  begin
- sdr_techmd = get_sdr_metadata(druid, "technicalMetadata")
- rescue RestClient::ResourceNotFound => e
- return nil
- end
- if sdr_techmd =~ /<technicalMetadata/
- return sdr_techmd
- elsif sdr_techmd =~ /<jhove/
- return ::JhoveService.new.upgrade_technical_metadata(sdr_techmd)
- else
+ sdr_techmd = get_sdr_metadata(druid, 'technicalMetadata')
+ rescue RestClient::ResourceNotFound
  return nil
  end
+ return sdr_techmd if sdr_techmd =~ /<technicalMetadata/
+ return ::JhoveService.new.upgrade_technical_metadata(sdr_techmd) if sdr_techmd =~ /<jhove/
+ nil
  end

  # @param [Dor::Item] dor_item The DOR item being processed by the technical metadata robot
  # @return [String] The technicalMetadata datastream from the previous version of the digital object (fetched from DOR fedora).
  # The data is updated to the latest format.
  def self.get_dor_technical_metadata(dor_item)
- ds = "technicalMetadata"
- if dor_item.datastreams.keys.include?(ds) && !dor_item.datastreams[ds].new?
- dor_techmd = dor_item.datastreams[ds].content
- else
- return nil
- end
- if dor_techmd =~ /<technicalMetadata/
- return dor_techmd
- elsif dor_techmd =~ /<jhove/
- return ::JhoveService.new.upgrade_technical_metadata(dor_techmd)
- else
- return nil
- end
+ ds = 'technicalMetadata'
+ return nil unless dor_item.datastreams.keys.include?(ds) && !dor_item.datastreams[ds].new?
+ dor_techmd = dor_item.datastreams[ds].content
+ return dor_techmd if dor_techmd =~ /<technicalMetadata/
+ return ::JhoveService.new.upgrade_technical_metadata(dor_techmd) if dor_techmd =~ /<jhove/
+ nil
  end

  # @param [String] druid The identifier of the digital object being processed by the technical metadata robot
@@ -119,8 +105,7 @@ module Dor
  def self.get_sdr_metadata(druid, dsname)
  sdr_client = Dor::Config.sdr.rest_client
  url = "objects/#{druid}/metadata/#{dsname}.xml"
- response = sdr_client[url].get
- response
+ sdr_client[url].get
  end

  # @param [DruidTools::Druid] druid A wrapper class for the druid identifier. Used to generate paths
@@ -129,10 +114,10 @@ module Dor
  def self.get_new_technical_metadata(druid, new_files)
  return nil if new_files.nil? || new_files.empty?
  workspace = DruidTools::Druid.new(druid, Dor::Config.sdr.local_workspace_root)
- content_dir = workspace.find_filelist_parent('content',new_files)
+ content_dir = workspace.find_filelist_parent('content', new_files)
  temp_dir = workspace.temp_dir
  jhove_service = ::JhoveService.new(temp_dir)
- jhove_service.digital_object_id=druid
+ jhove_service.digital_object_id = druid
  fileset_file = write_fileset(temp_dir, new_files)
  jhove_output_file = jhove_service.run_jhove(content_dir, fileset_file)
  tech_md_file = jhove_service.create_technical_metadata(jhove_output_file)
@@ -155,7 +140,7 @@ module Dor
  def self.merge_file_nodes(old_techmd, new_techmd, deltas)
  old_file_nodes = get_file_nodes(old_techmd)
  new_file_nodes = get_file_nodes(new_techmd)
- merged_nodes = Hash.new
+ merged_nodes = {}
  deltas[:identical].each do |path|
  merged_nodes[path] = old_file_nodes[path]
  end
@@ -165,12 +150,12 @@ module Dor
  deltas[:added].each do |path|
  merged_nodes[path] = new_file_nodes[path]
  end
- deltas[:renamed].each do |oldpath,newpath|
+ deltas[:renamed].each do |oldpath, newpath|
  clone = old_file_nodes[oldpath].clone
  clone.sub!(/<file\s*id.*?["'].*?["'].*?>/, "<file id='#{newpath}'>")
  merged_nodes[newpath] = clone
  end
- deltas[:copyadded].each do |oldpath,newpath|
+ deltas[:copyadded].each do |oldpath, newpath|
  clone = old_file_nodes[oldpath].clone
  clone.sub!(/<file\s*id.*?["'].*?["'].*?>/, "<file id='#{newpath}'>")
  merged_nodes[newpath] = clone
@@ -179,11 +164,11 @@ module Dor
  end

  # @param [String] technical_metadata A technicalMetadata datastream contents
- # @return [Hash<String,Nokogiri::XML::Node>] The set of nodes from a technicalMetadata datastream , indexed by filename
+ # @return [Hash<String,Nokogiri::XML::Node>] The set of nodes from a technicalMetadata datastream, indexed by filename
  def self.get_file_nodes(technical_metadata)
- file_hash = Hash.new
+ file_hash = {}
  return file_hash if technical_metadata.nil?
- current_file = Array.new
+ current_file = []
  path = nil
  in_file = false
  technical_metadata.each_line do |line|
@@ -194,7 +179,7 @@ module Dor
  elsif line =~ /^\s*<\/file>/
  current_file << line
  file_hash[path] = current_file.join
- current_file = Array.new
+ current_file = []
  path = nil
  in_file = false
  elsif in_file
@@ -216,10 +201,8 @@ module Dor
  EOF
  doc = techmd_root
  merged_nodes.keys.sort.each {|path| doc << merged_nodes[path] }
- doc << "</technicalMetadata>"
- doc
+ doc + '</technicalMetadata>'
  end

  end
-
  end
data/lib/dor/utils/ng_tidy.rb CHANGED
@@ -1,11 +1,11 @@
  class Nokogiri::XML::Text

  def normalize
- self.content =~ /\S/ ? self.content.gsub(/\s+/,' ').strip : self.content
+ content =~ /\S/ ? content.gsub(/\s+/, ' ').strip : content
  end

  def normalize!
- self.content = self.normalize
+ self.content = normalize
  end

  end
@@ -13,7 +13,7 @@ end
  class Nokogiri::XML::Node

  def normalize_text!
- self.xpath('//text()').each { |t| t.normalize! }
+ xpath('//text()').each { |t| t.normalize! }
  end

  end
data/lib/dor/utils/sdr_client.rb CHANGED
@@ -1,11 +1,10 @@
  module Sdr
-
  module Client
  class << self

  # @param [String] druid id of the object you want the version of
  # @return [Integer] the current version from SDR
- def current_version druid
+ def current_version(druid)
  sdr_client = Dor::Config.sdr.rest_client
  xml = sdr_client["objects/#{druid}/current_version"].get

@@ -13,7 +12,7 @@ module Sdr
  doc = Nokogiri::XML xml
  raise if doc.root.name != 'currentVersion'
  return Integer(doc.text)
- rescue => e
+ rescue
  raise "Unable to parse XML from SDR current_version API call: #{xml}"
  end
  end
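
The cleanup leaves current_version behaving as before: it GETs objects/{druid}/current_version from the configured SDR endpoint, parses the XML, and returns the version as an Integer, raising if the response cannot be parsed. A minimal sketch with a placeholder druid and return value:

  Sdr::Client.current_version('druid:ab123cd4567')   # => 3, for example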
data/lib/dor/utils/solr_doc_helper.rb CHANGED
@@ -1,7 +1,6 @@
  module SolrDocHelper
-
  def add_solr_value(solr_doc, field_name, value, field_type = :default, index_types = [:searchable])
- if Solrizer::VERSION > "3"
+ if Solrizer::VERSION > '3'
  case field_type
  when :symbol
  index_types << field_type
@@ -13,5 +12,4 @@ module SolrDocHelper
  }
  end
  end
-
  end
data/lib/dor/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Dor
- VERSION = '5.2.0'
+ VERSION = '5.3.0'
  end
data/lib/dor/workflow/document.rb CHANGED
@@ -6,23 +6,26 @@ module Workflow

  set_terminology do |t|
  t.root(:path => 'workflow')
- t.repository(:path=>{:attribute => "repository"})
- t.workflowId(:path=>{:attribute => "id"})
+ t.repository(:path => {:attribute => 'repository'})
+ t.workflowId(:path => {:attribute => 'id'})
  t.process {
- t.name_(:path=>{:attribute=>"name"})
- t.status(:path=>{:attribute=>"status"})
- t.timestamp(:path=>{:attribute=>"datetime"}) #, :data_type => :date)
- t.elapsed(:path=>{:attribute=>"elapsed"})
- t.lifecycle(:path=>{:attribute=>"lifecycle"})
- t.attempts(:path=>{:attribute=>"attempts"}, :index_as => [:not_searchable])
- t.version(:path=>{:attribute=>"version"})
+ t.name_(:path => {:attribute => 'name'})
+ t.status(:path => {:attribute => 'status'})
+ t.timestamp(:path => {:attribute => 'datetime'}) # , :data_type => :date)
+ t.elapsed(:path => {:attribute => 'elapsed'})
+ t.lifecycle(:path => {:attribute => 'lifecycle'})
+ t.attempts(:path => {:attribute => 'attempts'}, :index_as => [:not_searchable])
+ t.version(:path => {:attribute => 'version'})
  }
  end
- @@definitions={}
- def initialize node
+
+ @@definitions = {}
+
+ def initialize(node)
  self.ng_xml = Nokogiri::XML(node)
  end
- #is this an incomplete workflow with steps that have a priority > 0
+
+ # is this an incomplete workflow with steps that have a priority > 0
  def expedited?
  processes.any? { |proc| !proc.completed? && proc.priority.to_i > 0 }
  end
@@ -34,75 +37,75 @@ module Workflow

  # @return [Boolean] if any process node does not have version, returns true, false otherwise (all processes have version)
  def active?
- ng_xml.at_xpath("/workflow/process[not(@version)]") ? true : false
+ ng_xml.at_xpath('/workflow/process[not(@version)]') ? true : false
  end

  def definition
  @definition ||= begin
- if @@definitions.key? self.workflowId.first
- @@definitions[self.workflowId.first]
+ if @@definitions.key? workflowId.first
+ @@definitions[workflowId.first]
  else
- wfo = Dor::WorkflowObject.find_by_name(self.workflowId.first)
- wf_def=wfo ? wfo.definition : nil
- @@definitions[self.workflowId.first] = wf_def
+ wfo = Dor::WorkflowObject.find_by_name(workflowId.first)
+ wf_def = wfo ? wfo.definition : nil
+ @@definitions[workflowId.first] = wf_def
  wf_def
  end
  end
  end

- def graph(parent=nil, dir=nil)
- wf_definition = self.definition
- result = wf_definition ? Workflow::Graph.from_processes(wf_definition.repo, wf_definition.name, self.processes, parent) : nil
+ def graph(parent = nil, dir = nil)
+ wf_definition = definition
+ result = wf_definition ? Workflow::Graph.from_processes(wf_definition.repo, wf_definition.name, processes, parent) : nil
  result['rankdir'] = dir || 'TB' unless result.nil?
  result
  end

  def [](value)
- self.processes.find { |p| p.name == value }
+ processes.find { |p| p.name == value }
  end

  def processes
- #if the workflow service didnt return any processes, dont return any processes from the reified wf
- return [] if ng_xml.search("/workflow/process").length == 0
+ # if the workflow service didnt return any processes, dont return any processes from the reified wf
+ return [] if ng_xml.search('/workflow/process').length == 0
  @processes ||=
- if self.definition
- self.definition.processes.collect do |process|
+ if definition
+ definition.processes.collect do |process|
  node = ng_xml.at("/workflow/process[@name = '#{process.name}']")
- process.update!(node,self) unless node.nil?
+ process.update!(node, self) unless node.nil?
  process
  end
  else
- self.find_by_terms(:workflow, :process).collect do |x|
- pnode = Dor::Workflow::Process.new(self.repository, self.workflowId, {})
- pnode.update!(x,self)
+ find_by_terms(:workflow, :process).collect do |x|
+ pnode = Dor::Workflow::Process.new(repository, workflowId, {})
+ pnode.update!(x, self)
  pnode
  end.sort_by(&:datetime)
  end
  end

- def workflow_should_show_completed? processes
- return processes.all?{|p| ['skipped', 'completed', '', nil].include?(p.status)}
+ def workflow_should_show_completed?(processes)
+ processes.all? {|p| ['skipped', 'completed', '', nil].include?(p.status)}
  end

- def to_solr(solr_doc=Hash.new, *args)
- wf_name = self.workflowId.first
- repo = self.repository.first
+ def to_solr(solr_doc = {}, *args)
+ wf_name = workflowId.first
+ repo = repository.first
  wf_solr_type = :string
  wf_solr_attrs = [:symbol]
  add_solr_value(solr_doc, 'wf', wf_name, wf_solr_type, wf_solr_attrs)
  add_solr_value(solr_doc, 'wf_wps', wf_name, wf_solr_type, wf_solr_attrs)
  add_solr_value(solr_doc, 'wf_wsp', wf_name, wf_solr_type, wf_solr_attrs)
  status = processes.empty? ? 'empty' : (workflow_should_show_completed?(processes) ? 'completed' : 'active')
- errors = processes.select(&:error?).count
- add_solr_value(solr_doc, 'workflow_status', [wf_name,status,errors,repo].join('|'), wf_solr_type, wf_solr_attrs)
+ errors = processes.count(&:error?)
+ add_solr_value(solr_doc, 'workflow_status', [wf_name, status, errors, repo].join('|'), wf_solr_type, wf_solr_attrs)

  processes.each do |process|
  next unless process.status.present?
- #add a record of the robot having operated on this item, so we can track robot activity
+ # add a record of the robot having operated on this item, so we can track robot activity
  if process.date_time && process.status && (process.status == 'completed' || process.status == 'error')
  solr_doc["wf_#{wf_name}_#{process.name}_dttsi"] = "#{process.date_time}Z"
  end
- add_solr_value(solr_doc, 'wf_error', "#{wf_name}:#{process.name}:#{process.error_message}", wf_solr_type, wf_solr_attrs) if process.error_message #index the error message without the druid so we hopefully get some overlap
+ add_solr_value(solr_doc, 'wf_error', "#{wf_name}:#{process.name}:#{process.error_message}", wf_solr_type, wf_solr_attrs) if process.error_message # index the error message without the druid so we hopefully get some overlap
  add_solr_value(solr_doc, 'wf_wsp', "#{wf_name}:#{process.status}", wf_solr_type, wf_solr_attrs)
  add_solr_value(solr_doc, 'wf_wsp', "#{wf_name}:#{process.status}:#{process.name}", wf_solr_type, wf_solr_attrs)
  add_solr_value(solr_doc, 'wf_wps', "#{wf_name}:#{process.name}", wf_solr_type, wf_solr_attrs)
@@ -127,7 +130,7 @@ module Workflow
  end

  def inspect
- "#<#{self.class.name}:#{self.object_id}>"
+ "#<#{self.class.name}:#{object_id}>"
  end
  end
  end
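
The Document changes above are style-only (parenthesized method definitions, single quotes, dropped redundant self receivers), so the public surface is unchanged. A rough usage sketch, assuming workflow_xml holds a <workflow> XML string as returned by the workflow service; the step name is a placeholder:

  doc = Dor::Workflow::Document.new(workflow_xml)
  doc.expedited?               # true if any incomplete step has priority > 0
  doc['descriptive-metadata']  # looks up a process node by name via #[] (placeholder step name)
  solr_doc = doc.to_solr       # builds the wf, wf_wps, wf_wsp and workflow_status fields shown above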