dor-services 5.2.0 → 5.3.0

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the differences between the package versions as they appear in their respective public registries.
Files changed (71)
  1. checksums.yaml +5 -13
  2. data/config/certs/robots-dor-dev.crt +29 -0
  3. data/config/certs/robots-dor-dev.key +27 -0
  4. data/config/config_defaults.yml +2 -0
  5. data/config/dev_console_env.rb +77 -0
  6. data/lib/dor-services.rb +31 -27
  7. data/lib/dor/config.rb +25 -19
  8. data/lib/dor/datastreams/administrative_metadata_ds.rb +19 -20
  9. data/lib/dor/datastreams/content_metadata_ds.rb +238 -177
  10. data/lib/dor/datastreams/datastream_spec_solrizer.rb +1 -1
  11. data/lib/dor/datastreams/default_object_rights_ds.rb +99 -16
  12. data/lib/dor/datastreams/desc_metadata_ds.rb +37 -34
  13. data/lib/dor/datastreams/embargo_metadata_ds.rb +16 -16
  14. data/lib/dor/datastreams/events_ds.rb +2 -2
  15. data/lib/dor/datastreams/geo_metadata_ds.rb +5 -10
  16. data/lib/dor/datastreams/identity_metadata_ds.rb +22 -22
  17. data/lib/dor/datastreams/rights_metadata_ds.rb +43 -32
  18. data/lib/dor/datastreams/role_metadata_ds.rb +5 -5
  19. data/lib/dor/datastreams/simple_dublin_core_ds.rb +13 -14
  20. data/lib/dor/datastreams/version_metadata_ds.rb +22 -23
  21. data/lib/dor/datastreams/workflow_definition_ds.rb +15 -15
  22. data/lib/dor/datastreams/workflow_ds.rb +64 -70
  23. data/lib/dor/exceptions.rb +0 -1
  24. data/lib/dor/migrations/identifiable/uriify_augmented_contentlocation_refs.rb +4 -4
  25. data/lib/dor/migrations/processable/unify_workflows.rb +1 -1
  26. data/lib/dor/models/admin_policy_object.rb +4 -4
  27. data/lib/dor/models/assembleable.rb +2 -3
  28. data/lib/dor/models/collection.rb +1 -1
  29. data/lib/dor/models/contentable.rb +113 -108
  30. data/lib/dor/models/describable.rb +136 -95
  31. data/lib/dor/models/editable.rb +205 -119
  32. data/lib/dor/models/embargoable.rb +16 -16
  33. data/lib/dor/models/eventable.rb +2 -2
  34. data/lib/dor/models/geoable.rb +3 -3
  35. data/lib/dor/models/governable.rb +25 -26
  36. data/lib/dor/models/identifiable.rb +66 -55
  37. data/lib/dor/models/item.rb +0 -1
  38. data/lib/dor/models/itemizable.rb +7 -8
  39. data/lib/dor/models/preservable.rb +7 -8
  40. data/lib/dor/models/processable.rb +76 -73
  41. data/lib/dor/models/publishable.rb +25 -30
  42. data/lib/dor/models/releaseable.rb +118 -155
  43. data/lib/dor/models/rightsable.rb +2 -3
  44. data/lib/dor/models/set.rb +1 -1
  45. data/lib/dor/models/shelvable.rb +8 -10
  46. data/lib/dor/models/upgradable.rb +5 -6
  47. data/lib/dor/models/versionable.rb +3 -4
  48. data/lib/dor/models/workflow_object.rb +15 -16
  49. data/lib/dor/services/cleanup_reset_service.rb +15 -16
  50. data/lib/dor/services/cleanup_service.rb +2 -4
  51. data/lib/dor/services/digital_stacks_service.rb +10 -13
  52. data/lib/dor/services/merge_service.rb +8 -9
  53. data/lib/dor/services/metadata_handlers/catalog_handler.rb +1 -1
  54. data/lib/dor/services/metadata_handlers/mdtoolkit_handler.rb +3 -3
  55. data/lib/dor/services/metadata_service.rb +19 -20
  56. data/lib/dor/services/registration_service.rb +80 -61
  57. data/lib/dor/services/reset_workspace_service.rb +6 -10
  58. data/lib/dor/services/sdr_ingest_service.rb +15 -16
  59. data/lib/dor/services/search_service.rb +18 -23
  60. data/lib/dor/services/suri_service.rb +6 -6
  61. data/lib/dor/services/technical_metadata_service.rb +27 -44
  62. data/lib/dor/utils/ng_tidy.rb +3 -3
  63. data/lib/dor/utils/sdr_client.rb +2 -3
  64. data/lib/dor/utils/solr_doc_helper.rb +1 -3
  65. data/lib/dor/version.rb +1 -1
  66. data/lib/dor/workflow/document.rb +43 -40
  67. data/lib/dor/workflow/graph.rb +26 -26
  68. data/lib/dor/workflow/process.rb +34 -35
  69. data/lib/tasks/rdoc.rake +5 -5
  70. metadata +129 -111
  71. data/lib/dor/models/presentable.rb +0 -146
@@ -7,19 +7,18 @@ module Dor
7
7
  end
8
8
 
9
9
  def build_rightsMetadata_datastream(ds)
10
- content_ds = self.admin_policy_object.datastreams['defaultObjectRights']
10
+ content_ds = admin_policy_object.datastreams['defaultObjectRights']
11
11
  ds.dsLabel = 'Rights Metadata'
12
12
  ds.ng_xml = content_ds.ng_xml.clone
13
13
  ds.content = ds.ng_xml.to_xml
14
14
  end
15
15
 
16
16
  def world_doc
17
- return Nokogiri::XML::Builder.new do |xml|
17
+ Nokogiri::XML::Builder.new do |xml|
18
18
  xml.access(:type => 'read') {
19
19
  xml.machine { xml.world }
20
20
  }
21
21
  end.doc
22
22
  end
23
-
24
23
  end
25
24
  end
@@ -7,7 +7,7 @@ module Dor
7
7
  include Publishable
8
8
  include Versionable
9
9
 
10
- has_many :members, :property => :is_member_of_collection, :inbound => true, :class_name => "ActiveFedora::Base"
10
+ has_many :members, :property => :is_member_of_collection, :inbound => true, :class_name => 'ActiveFedora::Base'
11
11
  has_object_type 'set'
12
12
  end
13
13
  end
@@ -14,7 +14,7 @@ module Dor
14
14
  stacks_druid = DruidTools::StacksDruid.new id, stacks_object_pathname
15
15
  stacks_object_pathname = Pathname(stacks_druid.path)
16
16
  # determine the location of the object's content files in the workspace area
17
- workspace_druid = DruidTools::Druid.new(id,Config.stacks.local_workspace_root)
17
+ workspace_druid = DruidTools::Druid.new(id, Config.stacks.local_workspace_root)
18
18
  workspace_content_pathname = workspace_content_dir(shelve_diff, workspace_druid)
19
19
  # delete, rename, or copy files to the stacks area
20
20
  DigitalStacksService.remove_from_stacks(stacks_object_pathname, shelve_diff)
@@ -25,9 +25,9 @@ module Dor
25
25
  # retrieve the differences between the current contentMetadata and the previously ingested version
26
26
  # (filtering to select only the files that should be shelved to stacks)
27
27
  def get_shelve_diff
28
- inventory_diff_xml = self.get_content_diff(:shelve)
28
+ inventory_diff_xml = get_content_diff(:shelve)
29
29
  inventory_diff = Moab::FileInventoryDifference.parse(inventory_diff_xml)
30
- shelve_diff = inventory_diff.group_difference("content")
30
+ shelve_diff = inventory_diff.group_difference('content')
31
31
  shelve_diff
32
32
  end
33
33
 
@@ -37,7 +37,7 @@ module Dor
37
37
  # @return [Pathname] The location of the object's content files in the workspace area
38
38
  def workspace_content_dir(content_diff, workspace_druid)
39
39
  deltas = content_diff.file_deltas
40
- filelist = deltas[:modified] + deltas[:added] + deltas[:copyadded].collect{|old,new| new}
40
+ filelist = deltas[:modified] + deltas[:added] + deltas[:copyadded].collect {|old, new| new}
41
41
  return nil if filelist.empty?
42
42
  content_pathname = Pathname(workspace_druid.find_filelist_parent('content', filelist))
43
43
  content_pathname
@@ -46,15 +46,13 @@ module Dor
46
46
  # get the stack location based on the contentMetadata stacks attribute
47
47
  # or using the default value from the config file if it doesn't exist
48
48
  def get_stacks_location
49
-
50
- contentMetadataDS = self.datastreams['contentMetadata']
49
+ contentMetadataDS = datastreams['contentMetadata']
51
50
  unless contentMetadataDS.nil? || contentMetadataDS.stacks.length == 0
52
51
  stacks_location = contentMetadataDS.stacks[0]
53
- return stacks_location if stacks_location.start_with? "/" #Absolute stacks path
54
- raise "stacks attribute for item: "+self.id+ " contentMetadata should start with /. The current value is "+stacks_location
52
+ return stacks_location if stacks_location.start_with? '/' # Absolute stacks path
53
+ raise 'stacks attribute for item: ' + id + ' contentMetadata should start with /. The current value is ' + stacks_location
55
54
  end
56
- return Config.stacks.local_stacks_root #Default stacks
57
-
55
+ Config.stacks.local_stacks_root # Default stacks
58
56
  end
59
57
  end
60
58
  end
@@ -1,6 +1,5 @@
1
1
  module Dor
2
2
  module Upgradable
3
-
4
3
  # The Upgradable mixin is responsible for making sure all DOR objects,
5
4
  # concerns, and datastreams know how to upgrade themselves to the latest
6
5
  # Chimera/DOR content standards.
@@ -25,7 +24,7 @@ module Dor
25
24
 
26
25
  mattr_accessor :__upgrade_callbacks
27
26
  @@__upgrade_callbacks = []
28
- def self.add_upgrade_callback c, v, d, &b
27
+ def self.add_upgrade_callback(c, v, d, &b)
29
28
  @@__upgrade_callbacks << Callback.new(c, Gem::Version.new(v), d, b)
30
29
  end
31
30
 
@@ -46,11 +45,11 @@ module Dor
46
45
 
47
46
  def self.included(base)
48
47
  base.instance_eval do
49
- def self.on_upgrade version, desc, &block
48
+ def self.on_upgrade(version, desc, &block)
50
49
  Dor::Upgradable.add_upgrade_callback self, version, desc, &block
51
50
  end
52
51
 
53
- Dir[File.join(Dor.root,'dor','migrations',base.name.split(/::/).last.underscore,'*.rb')].each do |migration|
52
+ Dir[File.join(Dor.root, 'dor', 'migrations', base.name.split(/::/).last.underscore, '*.rb')].each do |migration|
54
53
  require migration
55
54
  end
56
55
  end
@@ -59,13 +58,13 @@ module Dor
59
58
  def upgrade!
60
59
  results = [Dor::Upgradable.run_upgrade_callbacks(self, self)]
61
60
  if self.respond_to?(:datastreams)
62
- self.datastreams.each_pair do |dsid, ds|
61
+ datastreams.each_pair do |dsid, ds|
63
62
  results << Dor::Upgradable.run_upgrade_callbacks(ds, self) unless ds.new?
64
63
  end
65
64
  end
66
65
 
67
66
  if results.any?
68
- self.save
67
+ save
69
68
  else
70
69
  false
71
70
  end
@@ -39,7 +39,7 @@ module Dor
39
39
 
40
40
  vmd_upd_info = opts[:vers_md_upd_info]
41
41
  return unless vmd_upd_info
42
- datastreams['events'].add_event("open", vmd_upd_info[:opening_user_name], "Version #{vmd_ds.current_version_id} opened")
42
+ datastreams['events'].add_event('open', vmd_upd_info[:opening_user_name], "Version #{vmd_ds.current_version_id} opened")
43
43
  vmd_ds.update_current_version({:description => vmd_upd_info[:description], :significance => vmd_upd_info[:significance].to_sym})
44
44
  save
45
45
  end
@@ -57,7 +57,7 @@ module Dor
57
57
  # @option opts [Boolean] :start_accesion set to true if you want accessioning to start (default), false otherwise
58
58
  # @raise [Dor::Exception] if the object hasn't been opened for versioning, or if accessionWF has
59
59
  # already been instantiated or the current version is missing a tag or description
60
- def close_version(opts={})
60
+ def close_version(opts = {})
61
61
  unless opts.empty?
62
62
  datastreams['versionMetadata'].update_current_version opts
63
63
  datastreams['versionMetadata'].save
@@ -78,7 +78,7 @@ module Dor
78
78
 
79
79
  # @return [Boolean] true if the object is in a state that allows it to be modified. States that will allow modification are: has not been submitted for accessioning, has an open version or has sdr-ingest set to hold
80
80
  def allows_modification?
81
- if Dor::WorkflowService.get_lifecycle('dor', pid, 'submitted') && !new_version_open? && Dor::WorkflowService.get_workflow_status('dor', pid, 'accessionWF', 'sdr-ingest-transfer')!='hold'
81
+ if Dor::WorkflowService.get_lifecycle('dor', pid, 'submitted') && !new_version_open? && Dor::WorkflowService.get_workflow_status('dor', pid, 'accessionWF', 'sdr-ingest-transfer') != 'hold'
82
82
  false
83
83
  else
84
84
  true
@@ -88,6 +88,5 @@ module Dor
88
88
  # Following chart of processes on this consul page: https://consul.stanford.edu/display/chimera/Versioning+workflows
89
89
  alias_method :start_version, :open_new_version
90
90
  alias_method :submit_version, :close_version
91
-
92
91
  end
93
92
  end
@@ -3,16 +3,15 @@ require 'dor/datastreams/workflow_definition_ds'
3
3
  module Dor
4
4
  class WorkflowObject < ::ActiveFedora::Base
5
5
  include Identifiable
6
- include SolrDocHelper
7
6
  include Governable
8
- @@xml_cache = {}
7
+ @@xml_cache = {}
9
8
  @@repo_cache = {}
10
9
 
11
10
  has_object_type 'workflow'
12
- has_metadata :name => "workflowDefinition", :type => Dor::WorkflowDefinitionDs, :label => 'Workflow Definition'
11
+ has_metadata :name => 'workflowDefinition', :type => Dor::WorkflowDefinitionDs, :label => 'Workflow Definition'
13
12
 
14
- def self.find_by_name(name, opts={})
15
- Dor.find_all(%{#{Solrizer.solr_name 'objectType', :symbol}:"#{self.object_type}" #{Solrizer.solr_name 'workflow_name', :symbol}:"#{name}"}, opts).first
13
+ def self.find_by_name(name, opts = {})
14
+ Dor.find_all(%(#{Solrizer.solr_name 'objectType', :symbol}:"#{object_type}" #{Solrizer.solr_name 'workflow_name', :symbol}:"#{name}"), opts).first
16
15
  end
17
16
 
18
17
  # Searches for the workflow definition object in DOR, then
@@ -22,8 +21,7 @@ module Dor
22
21
  # @return [String] the initial workflow xml
23
22
  def self.initial_workflow(name)
24
23
  return @@xml_cache[name] if @@xml_cache.include?(name)
25
-
26
- self.find_and_cache_workflow_xml_and_repo name
24
+ find_and_cache_workflow_xml_and_repo name
27
25
  @@xml_cache[name]
28
26
  end
29
27
 
@@ -33,8 +31,7 @@ module Dor
33
31
  # @return [String] the initial workflow xml
34
32
  def self.initial_repo(name)
35
33
  return @@repo_cache[name] if @@repo_cache.include?(name)
36
-
37
- self.find_and_cache_workflow_xml_and_repo name
34
+ find_and_cache_workflow_xml_and_repo name
38
35
  @@repo_cache[name]
39
36
  end
40
37
 
@@ -42,11 +39,11 @@ module Dor
42
39
  datastreams['workflowDefinition']
43
40
  end
44
41
 
45
- def graph *args
46
- self.definition.graph *args
42
+ def graph(*args)
43
+ definition.graph *args
47
44
  end
48
45
 
49
- def to_solr solr_doc=Hash.new, *args
46
+ def to_solr(solr_doc = {}, *args)
50
47
  super solr_doc, *args
51
48
  client = Dor::WorkflowService.workflow_resource
52
49
  xml = client["workflow_archive?repository=#{definition.repo}&workflow=#{definition.name}&count-only=true"].get
@@ -63,11 +60,13 @@ module Dor
63
60
 
64
61
  # Searches DOR for the workflow definition object. It then caches the workflow repository and xml
65
62
  # @param [String] name the name of the workflow
66
- def self.find_and_cache_workflow_xml_and_repo name
67
- wobj = self.find_by_name(name)
68
- wf_xml = wobj.generate_initial_workflow
63
+ # @return [Object] a Dor::xxxx object, e.g. a Dor::Item object
64
+ def self.find_and_cache_workflow_xml_and_repo(name)
65
+ wobj = find_by_name(name)
66
+ raise "Failed to find workflow via find_by_name('#{name}')" if wobj.nil?
69
67
  @@repo_cache[name] = wobj.definition.repo
70
- @@xml_cache[name] = wf_xml
68
+ @@xml_cache[name] = wobj.generate_initial_workflow
69
+ wobj
71
70
  end
72
71
 
73
72
  end
@@ -1,7 +1,6 @@
1
1
  require 'pathname'
2
2
 
3
3
  module Dor
4
-
5
4
  # Remove all traces of the object's data files from the workspace and export areas
6
5
  class CleanupResetService
7
6
 
@@ -18,22 +17,22 @@ module Dor
18
17
  druid_obj = Dor::Item.find(druid)
19
18
  last_version = druid_obj.current_version.to_i
20
19
 
21
- #if the current version is still open, avoid this versioned directory
22
- if Dor::WorkflowService.get_lifecycle('dor', druid, 'accessioned').nil? then
20
+ # if the current version is still open, avoid this versioned directory
21
+ if Dor::WorkflowService.get_lifecycle('dor', druid, 'accessioned').nil?
23
22
  last_version -= 1
24
23
  end
25
- return last_version
24
+ last_version
26
25
  end
27
26
 
28
27
  # @param [String] druid The identifier for the object whose reset data is to be removed
29
28
  # @param [String] base The base directory to delete from
30
29
  # @param [Integer] last_version The last version that the data should be removed until version 1
31
30
  # @return [void] remove all the object's reset data files from the workspace area equal to less than the last_version
32
- def self.cleanup_reset_workspace_content(druid,last_version, base)
31
+ def self.cleanup_reset_workspace_content(druid, last_version, base)
33
32
  base_druid = DruidTools::Druid.new(druid, base)
34
33
  base_druid_tree = base_druid.pathname.to_s
35
- #if it is truncated tree /aa/111/aaa/1111/content,
36
- #we should follow the regular cleanup technique
34
+ # if it is truncated tree /aa/111/aaa/1111/content,
35
+ # we should follow the regular cleanup technique
37
36
 
38
37
  reset_directories = get_reset_dir_list(last_version, base_druid_tree)
39
38
  reset_directories.each do |path|
@@ -47,11 +46,11 @@ module Dor
47
46
  # @return [void] prepares a list of reset directories that should be removed
48
47
  def self.get_reset_dir_list(last_version, base_druid_tree)
49
48
  reset_directories = []
50
- for i in 1..last_version
49
+ (1..last_version).each do |i|
51
50
  reset_path = "#{base_druid_tree}_v#{i}"
52
- reset_directories.append(reset_path) if File.exists?(reset_path)
51
+ reset_directories.append(reset_path) if File.exist?(reset_path)
53
52
  end
54
- return reset_directories
53
+ reset_directories
55
54
  end
56
55
 
57
56
  # @param [String] druid The identifier for the object whose reset bags data is to be removed
@@ -76,11 +75,11 @@ module Dor
76
75
  # @return [void] prepares a list of reset bag directories that should be removed
77
76
  def self.get_reset_bag_dir_list(last_version, base_bag_directory)
78
77
  reset_bags = []
79
- for i in 1..last_version do
78
+ (1..last_version).each do |i|
80
79
  reset_path = "#{base_bag_directory}_v#{i}"
81
- reset_bags.append(reset_path) if File.exists?(reset_path)
80
+ reset_bags.append(reset_path) if File.exist?(reset_path)
82
81
  end
83
- return reset_bags
82
+ reset_bags
84
83
  end
85
84
 
86
85
  # @param [String] base_bag_directory The base bag directory including the export home and druid id
@@ -88,11 +87,11 @@ module Dor
88
87
  # @return [void] prepares a list of reset bag tars that should be removed
89
88
  def self.get_reset_bag_tar_list(last_version, base_bag_directory)
90
89
  reset_bags = []
91
- for i in 1..last_version do
90
+ (1..last_version).each do |i|
92
91
  reset_path = "#{base_bag_directory}_v#{i}.tar"
93
- reset_bags.append(reset_path) if File.exists?(reset_path)
92
+ reset_bags.append(reset_path) if File.exist?(reset_path)
94
93
  end
95
- return reset_bags
94
+ reset_bags
96
95
  end
97
96
 
98
97
  # @param [String] druid The identifier for the object whose data is to be removed
@@ -1,7 +1,6 @@
1
1
  require 'pathname'
2
2
 
3
3
  module Dor
4
-
5
4
  # Remove all traces of the object's data files from the workspace and export areas
6
5
  class CleanupService
7
6
 
@@ -30,9 +29,9 @@ module Dor
30
29
  def self.cleanup_export(druid)
31
30
  id = druid.split(':').last
32
31
  bag_dir = File.join(Config.cleanup.local_export_home, id)
33
- self.remove_branch(bag_dir)
32
+ remove_branch(bag_dir)
34
33
  tarfile = "#{bag_dir}.tar"
35
- self.remove_branch(tarfile)
34
+ remove_branch(tarfile)
36
35
  end
37
36
 
38
37
  # @param [Pathname,String] pathname The full path of the branch to be removed
@@ -84,5 +83,4 @@ module Dor
84
83
  Dor::SearchService.solr.commit
85
84
  end
86
85
  end
87
-
88
86
  end
@@ -2,7 +2,6 @@ require 'net/ssh'
2
2
  require 'net/sftp'
3
3
 
4
4
  module Dor
5
-
6
5
  class DigitalStacksService
7
6
 
8
7
  # Delete files from stacks that have change type 'deleted', 'copydeleted', or 'modified'
@@ -14,7 +13,7 @@ module Dor
14
13
  subset.files.each do |moab_file| # {Moab::FileInstanceDifference}
15
14
  moab_signature = moab_file.signatures.first # {Moab::FileSignature}
16
15
  file_pathname = stacks_object_pathname.join(moab_file.basis_path)
17
- self.delete_file(file_pathname, moab_signature)
16
+ delete_file(file_pathname, moab_signature)
18
17
  end
19
18
  end
20
19
  end
@@ -26,12 +25,12 @@ module Dor
26
25
  def self.delete_file(file_pathname, moab_signature)
27
26
  if file_pathname.exist? && (file_pathname.size == moab_signature.size)
28
27
  file_signature = Moab::FileSignature.new.signature_from_file(file_pathname)
29
- if (file_signature == moab_signature)
28
+ if file_signature == moab_signature
30
29
  file_pathname.delete
31
30
  return true
32
31
  end
33
32
  end
34
- return false
33
+ false
35
34
  end
36
35
 
37
36
  # Rename files from stacks that have change type 'renamed' using an intermediate temp filename.
@@ -46,7 +45,7 @@ module Dor
46
45
  moab_signature = moab_file.signatures.first # {Moab::FileSignature}
47
46
  original_pathname = stacks_object_pathname.join(moab_file.basis_path)
48
47
  temp_pathname = stacks_object_pathname.join(moab_signature.checksums.values.last)
49
- self.rename_file(original_pathname, temp_pathname, moab_signature)
48
+ rename_file(original_pathname, temp_pathname, moab_signature)
50
49
  end
51
50
 
52
51
  # 2nd Pass - rename files from checksum-based name to new name
@@ -54,9 +53,8 @@ module Dor
54
53
  moab_signature = moab_file.signatures.first # {Moab::FileSignature}
55
54
  temp_pathname = stacks_object_pathname.join(moab_signature.checksums.values.last)
56
55
  new_pathname = stacks_object_pathname.join(moab_file.other_path)
57
- self.rename_file(temp_pathname, new_pathname, moab_signature)
56
+ rename_file(temp_pathname, new_pathname, moab_signature)
58
57
  end
59
-
60
58
  end
61
59
 
62
60
  # Rename a file, but only if it exists and has the expected signature
@@ -67,13 +65,13 @@ module Dor
67
65
  def self.rename_file(old_pathname, new_pathname, moab_signature)
68
66
  if old_pathname.exist? && (old_pathname.size == moab_signature.size)
69
67
  file_signature = Moab::FileSignature.new.signature_from_file(old_pathname)
70
- if (file_signature == moab_signature)
68
+ if file_signature == moab_signature
71
69
  new_pathname.parent.mkpath
72
70
  old_pathname.rename(new_pathname)
73
71
  return true
74
72
  end
75
73
  end
76
- return false
74
+ false
77
75
  end
78
76
 
79
77
  # Add files to stacks that have change type 'added', 'copyadded' or 'modified'.
@@ -89,7 +87,7 @@ module Dor
89
87
  filename = (change_type == :modified) ? moab_file.basis_path : moab_file.other_path
90
88
  workspace_pathname = workspace_content_pathname.join(filename)
91
89
  stacks_pathname = stacks_object_pathname.join(filename)
92
- self.copy_file(workspace_pathname, stacks_pathname, moab_signature)
90
+ copy_file(workspace_pathname, stacks_pathname, moab_signature)
93
91
  end
94
92
  end
95
93
  true
@@ -110,7 +108,7 @@ module Dor
110
108
  FileUtils.cp workspace_pathname.to_s, stacks_pathname.to_s
111
109
  return true
112
110
  end
113
- return false
111
+ false
114
112
  end
115
113
 
116
114
  ### depricated ???
@@ -127,7 +125,7 @@ module Dor
127
125
  end
128
126
 
129
127
  # Assumes the digital stacks storage root is mounted to the local file system
130
- # TODO since this is delegating to the Druid, this method may not be necessary
128
+ # TODO: since this is delegating to the Druid, this method may not be necessary
131
129
  def self.prune_stacks_dir(id)
132
130
  stacks_druid_tree = DruidTools::StacksDruid.new(id, Config.stacks.local_stacks_root)
133
131
  stacks_druid_tree.prune!
@@ -138,5 +136,4 @@ module Dor
138
136
  druid.prune!
139
137
  end
140
138
  end
141
-
142
139
  end
@@ -1,9 +1,8 @@
1
1
  module Dor
2
-
3
2
  class MergeService
4
3
 
5
- def self.merge_into_primary primary_druid, secondary_druids, tag, logger = nil
6
- # TODO test the secondary_obj to see if we've processed it already
4
+ def self.merge_into_primary(primary_druid, secondary_druids, tag, logger = nil)
5
+ # TODO: test the secondary_obj to see if we've processed it already
7
6
  merge_service = Dor::MergeService.new primary_druid, secondary_druids, tag, logger
8
7
  merge_service.check_objects_editable
9
8
  merge_service.move_metadata_and_content
@@ -11,7 +10,7 @@ module Dor
11
10
  # kick off commonAccessioning for the primary?
12
11
  end
13
12
 
14
- def initialize primary_druid, secondary_pids, tag, logger = nil
13
+ def initialize(primary_druid, secondary_pids, tag, logger = nil)
15
14
  @primary = Dor::Item.find primary_druid
16
15
  @secondary_pids = secondary_pids
17
16
  @secondary_objs = secondary_pids.map {|pid| Dor::Item.find pid }
@@ -45,14 +44,14 @@ module Dor
45
44
 
46
45
  @secondary_objs.each do |secondary|
47
46
  sec_druid = DruidTools::Druid.new secondary.pid, Dor::Config.stacks.local_workspace_root
48
- secondary.contentMetadata.ng_xml.xpath("//resource").each do |src_resource|
47
+ secondary.contentMetadata.ng_xml.xpath('//resource').each do |src_resource|
49
48
  primary_resource = primary_cm.at_xpath "//resource[attr[@name = 'mergedFromPid']/text() = '#{secondary.pid}' and
50
49
  attr[@name = 'mergedFromResource']/text() = '#{src_resource['id']}' ]"
51
50
  sequence = primary_resource['sequence']
52
- src_resource.xpath("//file/@id").map {|id| id.value }.each do |file_id|
51
+ src_resource.xpath('//file/@id').map {|id| id.value }.each do |file_id|
53
52
  copy_path = sec_druid.find_content file_id
54
53
  new_name = secondary.new_secondary_file_name(file_id, sequence)
55
- # TODO verify new_name exists in primary_cm?
54
+ # TODO: verify new_name exists in primary_cm?
56
55
  FileUtils.cp(copy_path, File.join(dest_path, "/#{new_name}"))
57
56
  end
58
57
  end
@@ -78,13 +77,13 @@ module Dor
78
77
  end
79
78
 
80
79
  # Remove content from stacks
81
- # TODO might set workflow status in future for robot to do
80
+ # TODO: might set workflow status in future for robot to do
82
81
  def unshelve
83
82
  DigitalStacksService.prune_stacks_dir @current_secondary.pid
84
83
  end
85
84
 
86
85
  # Withdraw item from Purl
87
- # TODO might set workflow status in future for robot to do
86
+ # TODO: might set workflow status in future for robot to do
88
87
  def unpublish
89
88
  @current_secondary.publish_metadata
90
89
  end