dor-services 6.0.0 → 6.0.1
- checksums.yaml +4 -4
- data/lib/dor-services.rb +7 -6
- data/lib/dor/certificate_authenticated_rest_resource_factory.rb +2 -1
- data/lib/dor/config.rb +38 -37
- data/lib/dor/datastreams/administrative_metadata_ds.rb +98 -98
- data/lib/dor/datastreams/content_metadata_ds.rb +26 -17
- data/lib/dor/datastreams/datastream_spec_solrizer.rb +4 -2
- data/lib/dor/datastreams/default_object_rights_ds.rb +10 -7
- data/lib/dor/datastreams/desc_metadata_ds.rb +6 -6
- data/lib/dor/datastreams/embargo_metadata_ds.rb +94 -94
- data/lib/dor/datastreams/events_ds.rb +55 -54
- data/lib/dor/datastreams/geo_metadata_ds.rb +7 -6
- data/lib/dor/datastreams/identity_metadata_ds.rb +128 -125
- data/lib/dor/datastreams/provenance_metadata_ds.rb +3 -1
- data/lib/dor/datastreams/rights_metadata_ds.rb +4 -4
- data/lib/dor/datastreams/role_metadata_ds.rb +42 -42
- data/lib/dor/datastreams/simple_dublin_core_ds.rb +45 -43
- data/lib/dor/datastreams/technical_metadata_ds.rb +3 -1
- data/lib/dor/datastreams/version_metadata_ds.rb +12 -5
- data/lib/dor/datastreams/workflow_definition_ds.rb +74 -73
- data/lib/dor/datastreams/workflow_ds.rb +12 -7
- data/lib/dor/exceptions.rb +2 -0
- data/lib/dor/indexers/data_indexer.rb +16 -0
- data/lib/dor/indexers/describable_indexer.rb +2 -0
- data/lib/dor/indexers/editable_indexer.rb +2 -0
- data/lib/dor/indexers/identifiable_indexer.rb +23 -8
- data/lib/dor/indexers/processable_indexer.rb +2 -0
- data/lib/dor/indexers/releasable_indexer.rb +2 -0
- data/lib/dor/models/abstract.rb +2 -0
- data/lib/dor/models/admin_policy_object.rb +2 -0
- data/lib/dor/models/agreement.rb +2 -0
- data/lib/dor/models/collection.rb +3 -0
- data/lib/dor/models/concerns/assembleable.rb +2 -0
- data/lib/dor/models/concerns/contentable.rb +5 -2
- data/lib/dor/models/concerns/describable.rb +7 -2
- data/lib/dor/models/concerns/editable.rb +28 -21
- data/lib/dor/models/concerns/embargoable.rb +4 -0
- data/lib/dor/models/concerns/eventable.rb +2 -0
- data/lib/dor/models/concerns/geoable.rb +2 -0
- data/lib/dor/models/concerns/governable.rb +7 -2
- data/lib/dor/models/concerns/identifiable.rb +33 -34
- data/lib/dor/models/concerns/itemizable.rb +4 -1
- data/lib/dor/models/concerns/preservable.rb +2 -0
- data/lib/dor/models/concerns/processable.rb +15 -9
- data/lib/dor/models/concerns/publishable.rb +9 -4
- data/lib/dor/models/concerns/releaseable.rb +21 -11
- data/lib/dor/models/concerns/rightsable.rb +2 -0
- data/lib/dor/models/concerns/shelvable.rb +6 -2
- data/lib/dor/models/concerns/versionable.rb +8 -4
- data/lib/dor/models/item.rb +2 -0
- data/lib/dor/models/set.rb +2 -0
- data/lib/dor/models/workflow_object.rb +5 -1
- data/lib/dor/rest_resource_factory.rb +2 -0
- data/lib/dor/services/cleanup_reset_service.rb +2 -1
- data/lib/dor/services/cleanup_service.rb +2 -1
- data/lib/dor/services/digital_stacks_service.rb +3 -1
- data/lib/dor/services/indexing_service.rb +3 -1
- data/lib/dor/services/merge_service.rb +6 -4
- data/lib/dor/services/metadata_handlers/catalog_handler.rb +2 -0
- data/lib/dor/services/metadata_service.rb +4 -4
- data/lib/dor/services/public_desc_metadata_service.rb +16 -8
- data/lib/dor/services/public_xml_service.rb +7 -4
- data/lib/dor/services/registration_service.rb +25 -20
- data/lib/dor/services/reset_workspace_service.rb +4 -4
- data/lib/dor/services/sdr_ingest_service.rb +4 -2
- data/lib/dor/services/search_service.rb +8 -9
- data/lib/dor/services/suri_service.rb +3 -2
- data/lib/dor/services/technical_metadata_service.rb +15 -9
- data/lib/dor/services/thumbnail_service.rb +14 -10
- data/lib/dor/utils/hydrus_shims.rb +2 -0
- data/lib/dor/utils/ng_tidy.rb +3 -7
- data/lib/dor/utils/predicate_patch.rb +2 -0
- data/lib/dor/utils/sdr_client.rb +3 -0
- data/lib/dor/utils/solr_doc_helper.rb +4 -2
- data/lib/dor/version.rb +3 -1
- data/lib/dor/workflow/document.rb +113 -108
- data/lib/dor/workflow/process.rb +90 -87
- data/lib/tasks/rdoc.rake +4 -3
- metadata +4 -4

data/lib/dor/models/concerns/processable.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'equivalent-xml'

 module Dor
@@ -25,15 +27,15 @@ module Dor

     # milestones from accessioning and the order they happen in
     STEPS = {
-      'registered'  => 1,
-      'submitted'   => 2,
-      'described'   => 3,
-      'published'   => 4,
-      'deposited'   => 5,
+      'registered' => 1,
+      'submitted' => 2,
+      'described' => 3,
+      'published' => 4,
+      'deposited' => 5,
       'accessioned' => 6,
-      'indexed'     => 7,
-      'shelved'     => 8,
-      'opened'      => 9
+      'indexed' => 7,
+      'shelved' => 8,
+      'opened' => 9
     }.freeze

     # This is a work-around for some strange logic in ActiveFedora that
@@ -42,12 +44,14 @@ module Dor
     def set_workflows_datastream_location
       return if self.respond_to?(:inner_object) && inner_object.is_a?(ActiveFedora::SolrDigitalObject)
       return unless workflows.new?
+
       workflows.mimeType = 'application/xml'
       workflows.dsLocation = File.join(Dor::Config.workflow.url, "dor/objects/#{pid}/workflows")
     end

     def empty_datastream?(datastream)
       return true if datastream.new?
+
       if datastream.class.respond_to?(:xml_template)
         datastream.content.to_s.empty? || EquivalentXml.equivalent?(datastream.content, datastream.class.xml_template)
       else
@@ -90,6 +94,7 @@ module Dor
       end
       # Check for success.
       raise "Required datastream #{datastream} could not be populated!" if is_required && empty_datastream?(ds)
+
       ds
     end

@@ -125,11 +130,12 @@ module Dor
         m_name = m[:milestone]
         m_time = m[:at].utc.xmlschema
         next unless STEPS.keys.include?(m_name) && (!status_time || m_time > status_time)
+
         status_code = STEPS[m_name]
         status_time = m_time
       end

-      {:current_version => current_version, :status_code => status_code, :status_time => status_time}
+      { :current_version => current_version, :status_code => status_code, :status_time => status_time }
     end

     # @param [Boolean] include_time
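
The STEPS hunk above reformats the milestone-to-step mapping, and the status hunk at the end of the same file uses that mapping to keep the most recent qualifying milestone. A minimal standalone sketch of that resolution logic, with made-up milestone data (in the gem the milestones come from the workflow service):

require 'time'

STEPS = {
  'registered' => 1, 'submitted' => 2, 'described' => 3, 'published' => 4, 'deposited' => 5,
  'accessioned' => 6, 'indexed' => 7, 'shelved' => 8, 'opened' => 9
}.freeze

# Hypothetical milestones; Dor::Processable reads these from the workflow service.
milestones = [
  { milestone: 'registered',  at: Time.utc(2018, 1, 1) },
  { milestone: 'submitted',   at: Time.utc(2018, 1, 2) },
  { milestone: 'accessioned', at: Time.utc(2018, 1, 3) }
]

status_code = 0
status_time = nil
milestones.each do |m|
  m_name = m[:milestone]
  m_time = m[:at].utc.xmlschema
  next unless STEPS.key?(m_name) && (!status_time || m_time > status_time)

  status_code = STEPS[m_name]
  status_time = m_time
end

p(status_code: status_code, status_time: status_time)
# => {:status_code=>6, :status_time=>"2018-01-03T00:00:00Z"}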

data/lib/dor/models/concerns/publishable.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'dor/datastreams/content_metadata_ds'
 require 'fileutils'

@@ -20,8 +22,9 @@ module Dor
     def encoded_thumb
       thumb_image = thumb # store the result locally, so we don't have to compute each time we use it below
       return unless thumb_image
-      thumb_druid = thumb_image.split('/').first # the druid (before the first slash)
-      thumb_filename = thumb_image.split(/#{pid_regex}[\/]/).last # everything after the druid
+
+      thumb_druid = thumb_image.split('/').first # the druid (before the first slash)
+      thumb_filename = thumb_image.split(/#{pid_regex}[\/]/).last # everything after the druid
       "#{thumb_druid}%2F#{ERB::Util.url_encode(thumb_filename)}"
     end
     deprecation_deprecate :encoded_thumb
@@ -30,7 +33,8 @@ module Dor
     # @return [String] fully qualified image URL for the computed thumbnail, e.g. https://stacks.stanford.edu/image/iiif/oo000oo0001%2Ffilenamewith%20space/full
     def thumb_url
       return unless encoded_thumb
-      thumb_basename = File.basename(encoded_thumb, File.extname(encoded_thumb)) # strip the extension for URL generation
+
+      thumb_basename = File.basename(encoded_thumb, File.extname(encoded_thumb)) # strip the extension for URL generation
       "https://#{Dor::Config.stacks.host}/image/iiif/#{thumb_basename}/full/!400,400/0/default.jpg"
     end
     deprecation_deprecate :thumb_url
@@ -60,7 +64,7 @@ module Dor
     def publish_metadata
       rights = datastreams['rightsMetadata'].ng_xml.clone.remove_namespaces!
       if rights.at_xpath("//rightsMetadata/access[@type='discover']/machine/world")
-        dc_xml = generate_dublin_core.to_xml {|config| config.no_declaration}
+        dc_xml = generate_dublin_core.to_xml { |config| config.no_declaration }
         DigitalStacksService.transfer_to_document_store(pid, dc_xml, 'dc')
         %w(identityMetadata contentMetadata rightsMetadata).each do |stream|
           DigitalStacksService.transfer_to_document_store(pid, datastreams[stream].content.to_s, stream) if datastreams[stream]
@@ -111,6 +115,7 @@ module Dor
     # When publishing a PURL, we notify purl-fetcher of changes.
     def publish_delete_on_success
       return unless Dor::Config.purl_services.url
+
       id = pid.gsub(/^druid:/, '')

       purl_services = Dor::Config.purl_services.rest_client
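
For the encoded_thumb and thumb_url hunks above, here is a small sketch of the same encoding applied to the example value from the code comments; the simple split('/', 2) stands in for the gem's pid_regex-based split, the '.jp2' extension is hypothetical, and stacks.stanford.edu is taken from the comment (in the gem the host comes from Dor::Config.stacks.host):

require 'erb'

thumb_image    = 'oo000oo0001/filenamewith space.jp2' # hypothetical, based on the example in the comment above
thumb_druid    = thumb_image.split('/').first
thumb_filename = thumb_image.split('/', 2).last       # stand-in for the pid_regex split

encoded_thumb = "#{thumb_druid}%2F#{ERB::Util.url_encode(thumb_filename)}"
puts encoded_thumb
# => oo000oo0001%2Ffilenamewith%20space.jp2

thumb_basename = File.basename(encoded_thumb, File.extname(encoded_thumb)) # strip the extension
puts "https://stacks.stanford.edu/image/iiif/#{thumb_basename}/full/!400,400/0/default.jpg"
# => https://stacks.stanford.edu/image/iiif/oo000oo0001%2Ffilenamewith%20space/full/!400,400/0/default.jpg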

data/lib/dor/models/concerns/releaseable.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'open-uri'
 require 'retries'

@@ -31,21 +33,22 @@ module Dor
       # Get the most recent self tag for all targets and retain their result since most recent self always trumps any other non self tags
       latest_self_tags = get_newest_release_tag get_self_release_tags(release_nodes)
       latest_self_tags.each do |key, payload|
-        released_hash[key] = {'release' => payload['release']}
+        released_hash[key] = { 'release' => payload['release'] }
       end

       # With Self Tags resolved we now need to deal with tags on all sets this object is part of.
       # Get all release tags on the item and strip out the what = self ones, we've already processed all the self tags on this item.
       # This will be where we store all tags that apply, regardless of their timestamp:
       potential_applicable_release_tags = get_tags_for_what_value(get_release_tags_for_item_and_all_governing_sets, 'collection')
-      administrative_tags = tags
+      administrative_tags = tags # Get admin tags once here and pass them down

       # We now have the keys for all potential releases, we need to check the tags: the most recent timestamp with an explicit true or false wins.
       # In a nil case, the lack of an explicit false tag we do nothing.
-      (potential_applicable_release_tags.keys - released_hash.keys).each do |key|
+      (potential_applicable_release_tags.keys - released_hash.keys).each do |key| # don't bother checking if already added to the release hash, they were added due to a self tag so that has won
         latest_tag = latest_applicable_release_tag_in_array(potential_applicable_release_tags[key], administrative_tags)
         next if latest_tag.nil? # Otherwise, we have a valid tag, record it
-        released_hash[key] = {'release' => latest_tag['release']}
+
+        released_hash[key] = { 'release' => latest_tag['release'] }
       end

       # See what the application is currently released for on Purl. If released in purl but not listed here, it needs to be added as a false
@@ -65,7 +68,8 @@ module Dor
     def get_release_tags_for_item_and_all_governing_sets
       return_tags = release_nodes || {}
       collections.each do |collection|
-        next if collection.id == id
+        next if collection.id == id # recursive, so parents of parents are found, but we need to avoid an infinite loop if the collection references itself (i.e. bad data)
+
         return_tags = combine_two_release_tag_hashes(return_tags, collection.get_release_tags_for_item_and_all_governing_sets)
       end
       return_tags
@@ -90,7 +94,7 @@ module Dor
     def get_tags_for_what_value(tags, what_target)
       return_hash = {}
       tags.keys.each do |key|
-        self_tags = tags[key].select {|tag| tag['what'].casecmp(what_target) == 0}
+        self_tags = tags[key].select { |tag| tag['what'].casecmp(what_target) == 0 }
         return_hash[key] = self_tags if self_tags.size > 0
       end
       return_hash
@@ -100,7 +104,7 @@ module Dor
     # @param tags [Hash] a hash of tags obtained via Dor::Item.release_tags or matching format
     # @return [Hash] a hash of latest tags for each to value
     def get_newest_release_tag(tags)
-      Hash[tags.map {|key, val| [key, newest_release_tag_in_an_array(val)]}]
+      Hash[tags.map { |key, val| [key, newest_release_tag_in_an_array(val)] }]
     end

     # Takes an array of release tags and returns the most recent one
@@ -121,6 +125,7 @@ module Dor
     def does_release_tag_apply(release_tag, admin_tags = false)
       # Is the tag global or restricted
       return true if release_tag['tag'].nil? # no specific tag specificied means this tag is global to all members of the collection
+
       admin_tags = tags unless admin_tags # We use false instead of [], since an item can have no admin_tags at which point we'd be passing this var as [] and would not attempt to retrieve it
       admin_tags.include?(release_tag['tag'])
     end
@@ -138,6 +143,7 @@ module Dor
       release_tags.slice!(release_tags.index(newest_tag))

       return latest_applicable_release_tag_in_array(release_tags, admin_tags) if release_tags.size > 0 # Try again after dropping the inapplicable
+
       nil # We're out of tags, no applicable ones
     end

@@ -186,6 +192,7 @@ module Dor
     # @return [Boolean] Returns true if no errors found
     def valid_release_attributes_and_tag(tag, attrs = {})
       raise ArgumentError, ':when is not iso8601' if attrs[:when].match('\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z').nil?
+
       [:who, :to, :what].each do |check_attr|
         raise ArgumentError, "#{check_attr} not supplied as a String" if attrs[check_attr].class != String
       end
@@ -196,6 +203,7 @@ module Dor
       end
       raise ArgumentError, ':what must be self or collection' unless what_correct
       raise ArgumentError, 'the value set for this tag is not a boolean' if !!tag != tag # rubocop:disable Style/DoubleNegation
+
       true
     end

@@ -211,7 +219,7 @@ module Dor
     # @example
     #   item.add_release_node(true,{:what=>'self',:to=>'Searchworks',:who=>'petucket'})
     def add_release_node(release, attrs = {})
-      allowed_release_attributes=[:what, :to, :who, :when] # any other release attributes sent in will be rejected and not stored
+      allowed_release_attributes = [:what, :to, :who, :when] # any other release attributes sent in will be rejected and not stored
       identity_metadata_ds = identityMetadata
       attrs.delete_if { |key, value| !allowed_release_attributes.include?(key) }
       attrs[:when] = Time.now.utc.iso8601 if attrs[:when].nil? # add the timestamp
@@ -227,11 +235,13 @@ module Dor
     # @return [Boolean] Returns true if no errors found
     def valid_release_attributes(tag, attrs = {})
       raise ArgumentError, ':when is not iso8601' if attrs[:when].match('\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z').nil?
+
       [:who, :to, :what].each do |check_attr|
         raise ArgumentError, "#{check_attr} not supplied as a String" if attrs[check_attr].class != String
       end
       raise ArgumentError, ':what must be self or collection' unless %w(self collection).include? attrs[:what]
-      raise ArgumentError, 'the value set for this tag is not a boolean' unless [true,false].include? tag
+      raise ArgumentError, 'the value set for this tag is not a boolean' unless [true, false].include? tag
+
       true
     end

@@ -283,7 +293,7 @@ module Dor
     def get_release_tags_from_purl_xml(doc)
       nodes = doc.xpath('//html/body/publicobject/releasedata').children
       # We only want the nodes with a name that isn't text
-      nodes.reject {|n| n.name.nil? || n.name.casecmp('text') == 0 }.map {|n| n.attr('to')}.uniq
+      nodes.reject { |n| n.name.nil? || n.name.casecmp('text') == 0 }.map { |n| n.attr('to') }.uniq
     end

     # Pull all release nodes from the public xml obtained via the purl query
@@ -300,7 +310,7 @@ module Dor
       tags_currently_in_purl = get_release_tags_from_purl
       missing_tags = tags_currently_in_purl.map(&:downcase) - new_tags.keys.map(&:downcase)
       missing_tags.each do |missing_tag|
-        new_tags[missing_tag.capitalize] = {'release' => false}
+        new_tags[missing_tag.capitalize] = { 'release' => false }
       end
       new_tags
     end

data/lib/dor/models/concerns/shelvable.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'moab/stanford'

 module Dor
@@ -35,8 +37,9 @@ module Dor
     # @return [Pathname] The location of the object's content files in the workspace area
     def workspace_content_dir(content_diff, workspace_druid)
       deltas = content_diff.file_deltas
-      filelist = deltas[:modified] + deltas[:added] + deltas[:copyadded].collect {|old, new| new}
+      filelist = deltas[:modified] + deltas[:added] + deltas[:copyadded].collect { |old, new| new }
       return nil if filelist.empty?
+
       content_pathname = Pathname(workspace_druid.find_filelist_parent('content', filelist))
       content_pathname
     end
@@ -47,7 +50,8 @@ module Dor
       contentMetadataDS = datastreams['contentMetadata']
       unless contentMetadataDS.nil? || contentMetadataDS.stacks.length == 0
         stacks_location = contentMetadataDS.stacks[0]
-        return stacks_location if stacks_location.start_with? '/'
+        return stacks_location if stacks_location.start_with? '/' # Absolute stacks path
+
         raise 'stacks attribute for item: ' + id + ' contentMetadata should start with /. The current value is ' + stacks_location
       end
       Config.stacks.local_stacks_root # Default stacks

data/lib/dor/models/concerns/versionable.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 module Dor
   module Versionable
     extend ActiveSupport::Concern
@@ -35,8 +37,9 @@ module Dor

       vmd_upd_info = opts[:vers_md_upd_info]
       return unless vmd_upd_info
+
       add_event('open', vmd_upd_info[:opening_user_name], "Version #{vmd_ds.current_version_id} opened")
-      vmd_ds.update_current_version({:description => vmd_upd_info[:description], :significance => vmd_upd_info[:significance].to_sym})
+      vmd_ds.update_current_version({ :description => vmd_upd_info[:description], :significance => vmd_upd_info[:significance].to_sym })
       save
     end

@@ -63,12 +66,13 @@ module Dor
       raise Dor::Exception, 'Trying to close version on an object not opened for versioning' unless new_version_open?
       raise Dor::Exception, 'accessionWF already created for versioned object' if Dor::Config.workflow.client.get_active_lifecycle('dor', pid, 'submitted')

-      Dor::Config.workflow.client.close_version 'dor', pid, opts.fetch(:start_accession, true)
+      Dor::Config.workflow.client.close_version 'dor', pid, opts.fetch(:start_accession, true) # Default to creating accessionWF when calling close_version
     end

     # @return [Boolean] true if 'opened' lifecycle is active, false otherwise
     def new_version_open?
       return true if Dor::Config.workflow.client.get_active_lifecycle('dor', pid, 'opened')
+
       false
     end

@@ -76,8 +80,8 @@ module Dor
     # States that will allow modification are: has not been submitted for accessioning, has an open version or has sdr-ingest set to hold
     def allows_modification?
       if Dor::Config.workflow.client.get_lifecycle('dor', pid, 'submitted') &&
-         !new_version_open? &&
-         Dor::Config.workflow.client.get_workflow_status('dor', pid, 'accessionWF', 'sdr-ingest-transfer') != 'hold'
+          !new_version_open? &&
+          Dor::Config.workflow.client.get_workflow_status('dor', pid, 'accessionWF', 'sdr-ingest-transfer') != 'hold'
         false
       else
         true

data/lib/dor/models/item.rb CHANGED
data/lib/dor/models/set.rb CHANGED

data/lib/dor/models/workflow_object.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'dor/datastreams/workflow_definition_ds'

 module Dor
@@ -26,6 +28,7 @@ module Dor
     # @return [String] the initial workflow xml
     def self.initial_workflow(name)
       return @@xml_cache[name] if @@xml_cache.include?(name)
+
       find_and_cache_workflow_xml_and_repo name
       @@xml_cache[name]
     end
@@ -36,6 +39,7 @@ module Dor
     # @return [String] the initial workflow xml
     def self.initial_repo(name)
       return @@repo_cache[name] if @@repo_cache.include?(name)
+
       find_and_cache_workflow_xml_and_repo name
       @@repo_cache[name]
     end
@@ -57,10 +61,10 @@ module Dor
     def self.find_and_cache_workflow_xml_and_repo(name)
       wobj = find_by_name(name)
       raise "Failed to find workflow via find_by_name('#{name}')" if wobj.nil?
+
       @@repo_cache[name] = wobj.definition.repo
       @@xml_cache[name] = wobj.generate_initial_workflow
       wobj
     end
-
   end
 end

data/lib/dor/services/cleanup_reset_service.rb CHANGED
@@ -1,9 +1,10 @@
+# frozen_string_literal: true
+
 require 'pathname'

 module Dor
   # Remove all traces of the object's data files from the workspace and export areas
   class CleanupResetService
-
     # @param [String] druid The identifier for the object whose reset data is to be removed
     # @return [void] remove copy of the reset data that was exported to preservation core
     def self.cleanup_by_reset_druid(druid)

data/lib/dor/services/cleanup_service.rb CHANGED
@@ -1,9 +1,10 @@
+# frozen_string_literal: true
+
 require 'pathname'

 module Dor
   # Remove all traces of the object's data files from the workspace and export areas
   class CleanupService
-
     # @param [LyberCore::Robots::WorkItem] dor_item The DOR work item whose workspace should be cleaned up
     # @return [void] Delete all workspace and export entities for the druid
     def self.cleanup(dor_item)

data/lib/dor/services/digital_stacks_service.rb CHANGED
@@ -1,9 +1,10 @@
+# frozen_string_literal: true
+
 require 'net/ssh'
 require 'net/sftp'

 module Dor
   class DigitalStacksService
-
     # Delete files from stacks that have change type 'deleted', 'copydeleted', or 'modified'
     # @param [Pathname] stacks_object_pathname the stacks location of the digital object
     # @param [Moab::FileGroupDifference] content_diff the content file version differences report
@@ -80,6 +81,7 @@ module Dor
     # @param [Moab::FileGroupDifference] content_diff the content file version differences report
     def self.shelve_to_stacks(workspace_content_pathname, stacks_object_pathname, content_diff)
       return false if workspace_content_pathname.nil?
+
       [:added, :copyadded, :modified].each do |change_type|
         subset = content_diff.subset(change_type) # {Moab::FileGroupDifferenceSubset
         subset.files.each do |moab_file| # {Moab::FileInstanceDifference}

data/lib/dor/services/indexing_service.rb CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'benchmark'

 module Dor
@@ -38,7 +40,7 @@ module Dor
     def self.reindex_pid_remotely(pid)
       pid = "druid:#{pid}" unless pid =~ /^druid:/
       realtime = Benchmark.realtime do
-        with_retries(max_tries: 3, rescue: [RestClient::Exception, Errno::ECONNREFUSED]) do
+        with_retries(max_tries: 3, rescue: [RestClient::Exception, Errno::ECONNREFUSED]) do
           RestClient.post("#{Config.dor_indexing_app.url}/reindex/#{pid}", '')
         end
       end
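
The reindex_pid_remotely hunk above keeps the retry wrapper on a single line. A minimal sketch of the same retry pattern from the retries gem, with a hypothetical indexer URL standing in for Config.dor_indexing_app.url:

require 'retries'
require 'rest-client'

indexer_url = 'https://dor-indexing-app.example.org' # hypothetical; the gem reads Config.dor_indexing_app.url
pid = 'druid:oo000oo0001'

# Retry the POST up to three times when the connection is refused or RestClient raises.
with_retries(max_tries: 3, rescue: [RestClient::Exception, Errno::ECONNREFUSED]) do
  RestClient.post("#{indexer_url}/reindex/#{pid}", '')
end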

data/lib/dor/services/merge_service.rb CHANGED
@@ -1,6 +1,7 @@
+# frozen_string_literal: true
+
 module Dor
   class MergeService
-
     def self.merge_into_primary(primary_druid, secondary_druids, tag, logger = nil)
       # TODO: test the secondary_obj to see if we've processed it already
       merge_service = Dor::MergeService.new primary_druid, secondary_druids, tag, logger
@@ -13,7 +14,7 @@ module Dor
     def initialize(primary_druid, secondary_pids, tag, logger = nil)
       @primary = Dor.find primary_druid
       @secondary_pids = secondary_pids
-      @secondary_objs = secondary_pids.map {|pid| Dor.find pid }
+      @secondary_objs = secondary_pids.map { |pid| Dor.find pid }
       if logger.nil?
         @logger = Logger.new(STDERR)
       else
@@ -24,7 +25,8 @@ module Dor

     def check_objects_editable
       raise Dor::Exception, "Primary object is not editable: #{@primary.pid}" unless @primary.allows_modification?
-      non_editable = @secondary_objs.detect {|obj| !obj.allows_modification? }
+
+      non_editable = @secondary_objs.detect { |obj| !obj.allows_modification? }
       raise Dor::Exception, "Secondary object is not editable: #{non_editable.pid}" if non_editable
     end

@@ -48,7 +50,7 @@ module Dor
       primary_resource = primary_cm.at_xpath "//resource[attr[@name = 'mergedFromPid']/text() = '#{secondary.pid}' and
                                               attr[@name = 'mergedFromResource']/text() = '#{src_resource['id']}' ]"
       sequence = primary_resource['sequence']
-      src_resource.xpath('//file/@id').map {|id| id.value }.each do |file_id|
+      src_resource.xpath('//file/@id').map { |id| id.value }.each do |file_id|
         copy_path = sec_druid.find_content file_id
         new_name = secondary.new_secondary_file_name(file_id, sequence)
         # TODO: verify new_name exists in primary_cm?