dor-services 5.1.1 → 5.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68)
  1. checksums.yaml +8 -8
  2. data/lib/dor-services.rb +1 -2
  3. data/lib/dor/config.rb +5 -6
  4. data/lib/dor/datastreams/content_metadata_ds.rb +17 -20
  5. data/lib/dor/datastreams/datastream_spec_solrizer.rb +1 -1
  6. data/lib/dor/datastreams/desc_metadata_ds.rb +7 -7
  7. data/lib/dor/datastreams/embargo_metadata_ds.rb +2 -7
  8. data/lib/dor/datastreams/events_ds.rb +9 -9
  9. data/lib/dor/datastreams/identity_metadata_ds.rb +29 -34
  10. data/lib/dor/datastreams/rights_metadata_ds.rb +1 -1
  11. data/lib/dor/datastreams/role_metadata_ds.rb +0 -1
  12. data/lib/dor/datastreams/simple_dublin_core_ds.rb +12 -13
  13. data/lib/dor/datastreams/version_metadata_ds.rb +11 -15
  14. data/lib/dor/datastreams/workflow_definition_ds.rb +18 -22
  15. data/lib/dor/datastreams/workflow_ds.rb +24 -36
  16. data/lib/dor/migrations/identifiable/assert_adminPolicy.rb +1 -1
  17. data/lib/dor/migrations/identifiable/fix_model_assertions.rb +1 -1
  18. data/lib/dor/migrations/identifiable/record_remediation.rb +2 -2
  19. data/lib/dor/migrations/identifiable/uriify_augmented_contentlocation_refs.rb +1 -1
  20. data/lib/dor/migrations/identifiable/uriify_contentlocation_refs.rb +1 -1
  21. data/lib/dor/migrations/processable/unify_workflows.rb +4 -4
  22. data/lib/dor/migrations/versionable/add_missing_version_md.rb +2 -2
  23. data/lib/dor/models/assembleable.rb +2 -2
  24. data/lib/dor/models/collection.rb +1 -0
  25. data/lib/dor/models/contentable.rb +3 -3
  26. data/lib/dor/models/describable.rb +16 -13
  27. data/lib/dor/models/editable.rb +3 -3
  28. data/lib/dor/models/embargoable.rb +2 -2
  29. data/lib/dor/models/eventable.rb +2 -2
  30. data/lib/dor/models/geoable.rb +14 -18
  31. data/lib/dor/models/governable.rb +1 -1
  32. data/lib/dor/models/identifiable.rb +36 -57
  33. data/lib/dor/models/itemizable.rb +6 -6
  34. data/lib/dor/models/presentable.rb +12 -12
  35. data/lib/dor/models/preservable.rb +2 -5
  36. data/lib/dor/models/processable.rb +19 -25
  37. data/lib/dor/models/publishable.rb +2 -2
  38. data/lib/dor/models/releaseable.rb +165 -212
  39. data/lib/dor/models/shelvable.rb +10 -14
  40. data/lib/dor/models/upgradable.rb +11 -11
  41. data/lib/dor/models/versionable.rb +16 -21
  42. data/lib/dor/models/workflow_object.rb +3 -3
  43. data/lib/dor/services/cleanup_reset_service.rb +32 -27
  44. data/lib/dor/services/digital_stacks_service.rb +3 -3
  45. data/lib/dor/services/merge_service.rb +4 -8
  46. data/lib/dor/services/metadata_handlers/catalog_handler.rb +1 -1
  47. data/lib/dor/services/metadata_handlers/mdtoolkit_handler.rb +4 -6
  48. data/lib/dor/services/metadata_service.rb +20 -22
  49. data/lib/dor/services/registration_service.rb +6 -8
  50. data/lib/dor/services/reset_workspace_service.rb +14 -16
  51. data/lib/dor/services/sdr_ingest_service.rb +2 -6
  52. data/lib/dor/services/search_service.rb +3 -3
  53. data/lib/dor/services/suri_service.rb +2 -3
  54. data/lib/dor/services/technical_metadata_service.rb +2 -3
  55. data/lib/dor/utils/ng_tidy.rb +6 -6
  56. data/lib/dor/utils/predicate_patch.rb +1 -1
  57. data/lib/dor/utils/solr_doc_helper.rb +2 -2
  58. data/lib/dor/version.rb +1 -1
  59. data/lib/dor/workflow/document.rb +27 -33
  60. data/lib/dor/workflow/graph.rb +34 -37
  61. data/lib/dor/workflow/process.rb +8 -8
  62. data/lib/tasks/rdoc.rake +5 -5
  63. metadata +4 -11
  64. data/bin/dor-indexer +0 -108
  65. data/bin/dor-indexerd +0 -73
  66. data/config/certs/robots-dor-dev.crt +0 -29
  67. data/config/certs/robots-dor-dev.key +0 -27
  68. data/config/dev_console_env.rb +0 -78
@@ -35,30 +35,26 @@ module Dor
35
35
  # @param [Moab::FileGroupDifference] content_diff The differences between the current contentMetadata and the previously ingested version
36
36
  # @param [DruidTools::Druid] workspace_druid the location of the object's files in the workspace area
37
37
  # @return [Pathname] The location of the object's content files in the workspace area
38
- def workspace_content_dir (content_diff, workspace_druid)
38
+ def workspace_content_dir(content_diff, workspace_druid)
39
39
  deltas = content_diff.file_deltas
40
40
  filelist = deltas[:modified] + deltas[:added] + deltas[:copyadded].collect{|old,new| new}
41
41
  return nil if filelist.empty?
42
42
  content_pathname = Pathname(workspace_druid.find_filelist_parent('content', filelist))
43
43
  content_pathname
44
44
  end
45
-
46
-
47
- # get the stack location based on the contentMetadata stacks attribute
45
+
46
+ # get the stack location based on the contentMetadata stacks attribute
48
47
  # or using the default value from the config file if it doesn't exist
49
48
  def get_stacks_location
50
-
49
+
51
50
  contentMetadataDS = self.datastreams['contentMetadata']
52
- unless contentMetadataDS.nil? or contentMetadataDS.stacks.length == 0
53
- stacks_location = contentMetadataDS.stacks[0]
54
- if stacks_location.start_with?"/" #Absolute stacks path
55
- return stacks_location
56
- else
57
- raise "stacks attribute for item: "+self.id+ " contentMetadata should start with /. The current value is "+stacks_location
58
- end
59
- end
51
+ unless contentMetadataDS.nil? || contentMetadataDS.stacks.length == 0
52
+ stacks_location = contentMetadataDS.stacks[0]
53
+ return stacks_location if stacks_location.start_with? "/" #Absolute stacks path
54
+ raise "stacks attribute for item: "+self.id+ " contentMetadata should start with /. The current value is "+stacks_location
55
+ end
60
56
  return Config.stacks.local_stacks_root #Default stacks
61
-
57
+
62
58
  end
63
59
  end
64
60
  end
@@ -1,16 +1,16 @@
1
1
  module Dor
2
2
  module Upgradable
3
3
 
4
- # The Upgradable mixin is responsible for making sure all DOR objects,
5
- # concerns, and datastreams know how to upgrade themselves to the latest
4
+ # The Upgradable mixin is responsible for making sure all DOR objects,
5
+ # concerns, and datastreams know how to upgrade themselves to the latest
6
6
  # Chimera/DOR content standards.
7
7
  #
8
8
  # To add a new upgrade:
9
9
  # 1) include Dor::Upgradable within whatever model, datastream, or mixin
10
10
  # you want to make upgradable.
11
11
  # 2) Add a block to the model, datastream, or mixin as follows:
12
- #
13
- # on_upgrade(v) do |obj|
12
+ #
13
+ # on_upgrade(v) do |obj|
14
14
  # # Do whatever needs to be done to obj
15
15
  # end
16
16
  #
@@ -20,7 +20,7 @@ module Dor
20
20
  # The block can either be defined on the model itself, or in a file
21
21
  # in the dor/migrations/[model] directory. See Dor::Identifiable and
22
22
  # dor/migrations/identifiable/* for an example.
23
-
23
+
24
24
  Callback = Struct.new :module, :version, :description, :block
25
25
 
26
26
  mattr_accessor :__upgrade_callbacks
@@ -28,12 +28,12 @@ module Dor
28
28
  def self.add_upgrade_callback c, v, d, &b
29
29
  @@__upgrade_callbacks << Callback.new(c, Gem::Version.new(v), d, b)
30
30
  end
31
-
31
+
32
32
  def self.run_upgrade_callbacks(obj, event_handler)
33
33
  relevant = @@__upgrade_callbacks.select { |c| obj.is_a?(c.module) }.sort_by(&:version)
34
- results = relevant.collect do |c|
34
+ results = relevant.collect do |c|
35
35
  result = c.block.call(obj)
36
- if result and event_handler.respond_to?(:add_event)
36
+ if result && event_handler.respond_to?(:add_event)
37
37
  event_handler.add_event 'remediation', "#{c.module.name} #{c.version}", c.description
38
38
  end
39
39
  if result
@@ -43,19 +43,19 @@ module Dor
43
43
  end
44
44
  results.any?
45
45
  end
46
-
46
+
47
47
  def self.included(base)
48
48
  base.instance_eval do
49
49
  def self.on_upgrade version, desc, &block
50
50
  Dor::Upgradable.add_upgrade_callback self, version, desc, &block
51
51
  end
52
-
52
+
53
53
  Dir[File.join(Dor.root,'dor','migrations',base.name.split(/::/).last.underscore,'*.rb')].each do |migration|
54
54
  require migration
55
55
  end
56
56
  end
57
57
  end
58
-
58
+
59
59
  def upgrade!
60
60
  results = [Dor::Upgradable.run_upgrade_callbacks(self, self)]
61
61
  if self.respond_to?(:datastreams)
@@ -16,14 +16,11 @@ module Dor
16
16
  # @option opts [Hash] :vers_md_upd_info If present, used to add to the events datastream and set the desc and significance on the versionMetadata datastream
17
17
  # @raise [Dor::Exception] if the object hasn't been accessioned, or if a version is already opened
18
18
  def open_new_version(opts = {})
19
- # During local development, we need a way to open a new version
20
- # even if the object has not been accessioned.
19
+ # During local development, we need a way to open a new version even if the object has not been accessioned.
21
20
  raise(Dor::Exception, 'Object net yet accessioned') unless
22
- opts[:assume_accessioned] ||
23
- Dor::WorkflowService.get_lifecycle('dor', pid, 'accessioned')
24
-
25
- raise Dor::Exception, 'Object already opened for versioning' if(new_version_open?)
26
- raise Dor::Exception, 'Object currently being accessioned' if(Dor::WorkflowService.get_active_lifecycle('dor', pid, 'submitted'))
21
+ opts[:assume_accessioned] || Dor::WorkflowService.get_lifecycle('dor', pid, 'accessioned')
22
+ raise Dor::Exception, 'Object already opened for versioning' if new_version_open?
23
+ raise Dor::Exception, 'Object currently being accessioned' if Dor::WorkflowService.get_active_lifecycle('dor', pid, 'submitted')
27
24
 
28
25
  sdr_version = Sdr::Client.current_version pid
29
26
 
@@ -33,20 +30,18 @@ module Dor
33
30
  vmd_ds.save unless self.new_object?
34
31
 
35
32
  k = :create_workflows_ds
36
- if opts.has_key?(k)
37
- # During local development, Hydrus (or some other app running Fedora locally)
38
- # does not want this call to initialize the workflows datastream.
33
+ if opts.key?(k)
34
+ # During local development, Hydrus (or another app w/ local Fedora) does not want to initialize workflows datastream.
39
35
  initialize_workflow('versioningWF', opts[k])
40
36
  else
41
37
  initialize_workflow('versioningWF')
42
38
  end
43
39
 
44
40
  vmd_upd_info = opts[:vers_md_upd_info]
45
- if vmd_upd_info
46
- datastreams['events'].add_event("open", vmd_upd_info[:opening_user_name], "Version #{vmd_ds.current_version_id.to_s} opened")
47
- vmd_ds.update_current_version({:description => vmd_upd_info[:description], :significance => vmd_upd_info[:significance].to_sym})
48
- save
49
- end
41
+ return unless vmd_upd_info
42
+ datastreams['events'].add_event("open", vmd_upd_info[:opening_user_name], "Version #{vmd_ds.current_version_id} opened")
43
+ vmd_ds.update_current_version({:description => vmd_upd_info[:description], :significance => vmd_upd_info[:significance].to_sym})
44
+ save
50
45
  end
51
46
 
52
47
  def current_version
@@ -63,27 +58,27 @@ module Dor
63
58
  # @raise [Dor::Exception] if the object hasn't been opened for versioning, or if accessionWF has
64
59
  # already been instantiated or the current version is missing a tag or description
65
60
  def close_version(opts={})
66
- unless(opts.empty?)
61
+ unless opts.empty?
67
62
  datastreams['versionMetadata'].update_current_version opts
68
63
  datastreams['versionMetadata'].save
69
64
  end
70
65
 
71
- raise Dor::Exception, 'latest version in versionMetadata requires tag and description before it can be closed' unless(datastreams['versionMetadata'].current_version_closeable?)
72
- raise Dor::Exception, 'Trying to close version on an object not opened for versioning' unless(new_version_open?)
73
- raise Dor::Exception, 'accessionWF already created for versioned object' if(Dor::WorkflowService.get_active_lifecycle('dor', pid, 'submitted'))
66
+ raise Dor::Exception, 'latest version in versionMetadata requires tag and description before it can be closed' unless datastreams['versionMetadata'].current_version_closeable?
67
+ raise Dor::Exception, 'Trying to close version on an object not opened for versioning' unless new_version_open?
68
+ raise Dor::Exception, 'accessionWF already created for versioned object' if Dor::WorkflowService.get_active_lifecycle('dor', pid, 'submitted')
74
69
 
75
70
  Dor::WorkflowService.close_version 'dor', pid, opts.fetch(:start_accession, true) # Default to creating accessionWF when calling close_version
76
71
  end
77
72
 
78
73
  # @return [Boolean] true if 'opened' lifecycle is active, false otherwise
79
74
  def new_version_open?
80
- return true if(Dor::WorkflowService.get_active_lifecycle('dor', pid, 'opened'))
75
+ return true if Dor::WorkflowService.get_active_lifecycle('dor', pid, 'opened')
81
76
  false
82
77
  end
83
78
 
84
79
  # @return [Boolean] true if the object is in a state that allows it to be modified. States that will allow modification are: has not been submitted for accessioning, has an open version or has sdr-ingest set to hold
85
80
  def allows_modification?
86
- if Dor::WorkflowService.get_lifecycle('dor', pid, 'submitted') && ! new_version_open? && Dor::WorkflowService.get_workflow_status('dor', pid, 'accessionWF', 'sdr-ingest-transfer')!='hold'
81
+ if Dor::WorkflowService.get_lifecycle('dor', pid, 'submitted') && !new_version_open? && Dor::WorkflowService.get_workflow_status('dor', pid, 'accessionWF', 'sdr-ingest-transfer')!='hold'
87
82
  false
88
83
  else
89
84
  true
@@ -12,7 +12,7 @@ module Dor
12
12
  has_metadata :name => "workflowDefinition", :type => Dor::WorkflowDefinitionDs, :label => 'Workflow Definition'
13
13
 
14
14
  def self.find_by_name(name, opts={})
15
- Dor.find_all(%{#{Solrizer.solr_name "objectType", :symbol}:"#{self.object_type}" #{Solrizer.solr_name "workflow_name", :symbol}:"#{name}"}, opts).first
15
+ Dor.find_all(%{#{Solrizer.solr_name 'objectType', :symbol}:"#{self.object_type}" #{Solrizer.solr_name 'workflow_name', :symbol}:"#{name}"}, opts).first
16
16
  end
17
17
 
18
18
  # Searches for the workflow definition object in DOR, then
@@ -21,7 +21,7 @@ module Dor
21
21
  # @param [String] name the name of the workflow
22
22
  # @return [String] the initial workflow xml
23
23
  def self.initial_workflow(name)
24
- return @@xml_cache[name] if(@@xml_cache.include?(name))
24
+ return @@xml_cache[name] if @@xml_cache.include?(name)
25
25
 
26
26
  self.find_and_cache_workflow_xml_and_repo name
27
27
  @@xml_cache[name]
@@ -32,7 +32,7 @@ module Dor
32
32
  # @param [String] name the name of the workflow
33
33
  # @return [String] the initial workflow xml
34
34
  def self.initial_repo(name)
35
- return @@repo_cache[name] if(@@repo_cache.include?(name))
35
+ return @@repo_cache[name] if @@repo_cache.include?(name)
36
36
 
37
37
  self.find_and_cache_workflow_xml_and_repo name
38
38
  @@repo_cache[name]
@@ -4,27 +4,27 @@ module Dor
4
4
 
5
5
  # Remove all traces of the object's data files from the workspace and export areas
6
6
  class CleanupResetService
7
-
7
+
8
8
  # @param [String] druid The identifier for the object whose reset data is to be removed
9
9
  # @return [void] remove copy of the reset data that was exported to preservation core
10
10
  def self.cleanup_by_reset_druid(druid)
11
- last_version = get_druid_last_version(druid)
11
+ last_version = get_druid_last_version(druid)
12
12
  cleanup_reset_workspace_content(druid, last_version, Config.cleanup.local_workspace_root)
13
- cleanup_reset_workspace_content(druid, last_version, Config.cleanup.local_assembly_root)
13
+ cleanup_assembly_content(druid, Config.cleanup.local_assembly_root)
14
14
  cleanup_reset_export(druid, last_version)
15
15
  end
16
-
16
+
17
17
  def self.get_druid_last_version(druid)
18
18
  druid_obj = Dor::Item.find(druid)
19
19
  last_version = druid_obj.current_version.to_i
20
-
20
+
21
21
  #if the current version is still open, avoid this versioned directory
22
- if Dor::WorkflowService.get_lifecycle('dor', druid, 'accessioned').nil? then
23
- last_version = last_version - 1
22
+ if Dor::WorkflowService.get_lifecycle('dor', druid, 'accessioned').nil? then
23
+ last_version -= 1
24
24
  end
25
25
  return last_version
26
26
  end
27
-
27
+
28
28
  # @param [String] druid The identifier for the object whose reset data is to be removed
29
29
  # @param [String] base The base directory to delete from
30
30
  # @param [Integer] last_version The last version that the data should be removed until version 1
@@ -32,27 +32,26 @@ module Dor
32
32
  def self.cleanup_reset_workspace_content(druid,last_version, base)
33
33
  base_druid = DruidTools::Druid.new(druid, base)
34
34
  base_druid_tree = base_druid.pathname.to_s
35
- #if it is truncated tree /aa/111/aaa/1111/content,
35
+ #if it is truncated tree /aa/111/aaa/1111/content,
36
36
  #we should follow the regular cleanup technique
37
37
 
38
38
  reset_directories = get_reset_dir_list(last_version, base_druid_tree)
39
- reset_directories.each do |path|
39
+ reset_directories.each do |path|
40
40
  FileUtils.rm_rf(path)
41
41
  end
42
42
  base_druid.prune_ancestors(base_druid.pathname.parent)
43
43
  end
44
-
45
-
44
+
46
45
  # @param [String] base_druid_tree The base directory to delete from
47
46
  # @param [Integer] last_version The last version that the data should be removed until version 1
48
47
  # @return [void] prepares a list of reset directories that should be removed
49
48
  def self.get_reset_dir_list(last_version, base_druid_tree)
50
- reset_directories = []
51
- for i in 1..last_version
49
+ reset_directories = []
50
+ for i in 1..last_version
52
51
  reset_path = "#{base_druid_tree}_v#{i}"
53
- reset_directories.append(reset_path) if File.exists?(reset_path)
52
+ reset_directories.append(reset_path) if File.exists?(reset_path)
54
53
  end
55
- return reset_directories
54
+ return reset_directories
56
55
  end
57
56
 
58
57
  # @param [String] druid The identifier for the object whose reset bags data is to be removed
@@ -60,41 +59,47 @@ module Dor
60
59
  def self.cleanup_reset_export(druid, last_version)
61
60
  id = druid.split(':').last
62
61
  base_bag_directory = File.join(Config.cleanup.local_export_home, id)
63
-
62
+
64
63
  bag_dir_list = get_reset_bag_dir_list(last_version, base_bag_directory)
65
- bag_dir_list.each do |bag_dir|
64
+ bag_dir_list.each do |bag_dir|
66
65
  Pathname(bag_dir).rmtree
67
66
  end
68
-
67
+
69
68
  bag_tar_list = get_reset_bag_tar_list(last_version, base_bag_directory)
70
- bag_tar_list.each do |bag_tar|
69
+ bag_tar_list.each do |bag_tar|
71
70
  Pathname(bag_tar).rmtree
72
71
  end
73
72
  end
74
-
73
+
75
74
  # @param [Integer] last_version The last version that the data should be removed until version 1
76
75
  # @param [String] base_bag_directory The base bag directory including the export home and druid id
77
76
  # @return [void] prepares a list of reset bag directories that should be removed
78
77
  def self.get_reset_bag_dir_list(last_version, base_bag_directory)
79
- reset_bags = []
78
+ reset_bags = []
80
79
  for i in 1..last_version do
81
80
  reset_path = "#{base_bag_directory}_v#{i}"
82
- reset_bags.append(reset_path) if File.exists?(reset_path)
81
+ reset_bags.append(reset_path) if File.exists?(reset_path)
83
82
  end
84
- return reset_bags
83
+ return reset_bags
85
84
  end
86
85
 
87
86
  # @param [String] base_bag_directory The base bag directory including the export home and druid id
88
87
  # @param [Integer] last_version The last version that the data should be removed until version 1
89
88
  # @return [void] prepares a list of reset bag tars that should be removed
90
89
  def self.get_reset_bag_tar_list(last_version, base_bag_directory)
91
- reset_bags = []
90
+ reset_bags = []
92
91
  for i in 1..last_version do
93
92
  reset_path = "#{base_bag_directory}_v#{i}.tar"
94
- reset_bags.append(reset_path) if File.exists?(reset_path)
93
+ reset_bags.append(reset_path) if File.exists?(reset_path)
95
94
  end
96
- return reset_bags
95
+ return reset_bags
97
96
  end
98
97
 
98
+ # @param [String] druid The identifier for the object whose data is to be removed
99
+ # @param [String] base The base directory to delete from
100
+ # @return [void] remove the object's data files from the assembly area
101
+ def self.cleanup_assembly_content(druid, base)
102
+ DruidTools::Druid.new(druid, base).prune!
103
+ end
99
104
  end
100
105
  end
@@ -24,7 +24,7 @@ module Dor
24
24
  # @param [Moab::FileSignature] moab_signature The fixity values of the file
25
25
  # @return [Boolean] true if file deleted, false otherwise
26
26
  def self.delete_file(file_pathname, moab_signature)
27
- if file_pathname.exist? and (file_pathname.size == moab_signature.size)
27
+ if file_pathname.exist? && (file_pathname.size == moab_signature.size)
28
28
  file_signature = Moab::FileSignature.new.signature_from_file(file_pathname)
29
29
  if (file_signature == moab_signature)
30
30
  file_pathname.delete
@@ -65,7 +65,7 @@ module Dor
65
65
  # @param [Moab::FileSignature] moab_signature The fixity values of the file
66
66
  # @return [Boolean] true if file renamed, false otherwise
67
67
  def self.rename_file(old_pathname, new_pathname, moab_signature)
68
- if old_pathname.exist? and (old_pathname.size == moab_signature.size)
68
+ if old_pathname.exist? && (old_pathname.size == moab_signature.size)
69
69
  file_signature = Moab::FileSignature.new.signature_from_file(old_pathname)
70
70
  if (file_signature == moab_signature)
71
71
  new_pathname.parent.mkpath
@@ -82,7 +82,7 @@ module Dor
82
82
  # @param [Moab::FileGroupDifference] content_diff the content file version differences report
83
83
  def self.shelve_to_stacks(workspace_content_pathname, stacks_object_pathname, content_diff)
84
84
  return false if workspace_content_pathname.nil?
85
- [:added, :copyadded, :modified,].each do |change_type|
85
+ [:added, :copyadded, :modified].each do |change_type|
86
86
  subset = content_diff.subset(change_type) # {Moab::FileGroupDifferenceSubset
87
87
  subset.files.each do |moab_file| # {Moab::FileInstanceDifference}
88
88
  moab_signature = moab_file.signatures.last # {Moab::FileSignature}
@@ -2,7 +2,7 @@ module Dor
2
2
 
3
3
  class MergeService
4
4
 
5
- def MergeService.merge_into_primary primary_druid, secondary_druids, tag, logger = nil
5
+ def self.merge_into_primary primary_druid, secondary_druids, tag, logger = nil
6
6
  # TODO test the secondary_obj to see if we've processed it already
7
7
  merge_service = Dor::MergeService.new primary_druid, secondary_druids, tag, logger
8
8
  merge_service.check_objects_editable
@@ -24,12 +24,9 @@ module Dor
24
24
  end
25
25
 
26
26
  def check_objects_editable
27
- unless @primary.allows_modification?
28
- raise Dor::Exception.new "Primary object is not editable: #{@primary.pid}"
29
- end
30
- if ( non_editable = (@secondary_objs.detect {|obj| ! obj.allows_modification? } ))
31
- raise Dor::Exception.new "Secondary object is not editable: #{non_editable.pid}"
32
- end
27
+ raise Dor::Exception.new("Primary object is not editable: #{@primary.pid}") unless @primary.allows_modification?
28
+ non_editable = @secondary_objs.detect {|obj| !obj.allows_modification? }
29
+ raise Dor::Exception.new "Secondary object is not editable: #{non_editable.pid}" if non_editable
33
30
  end
34
31
 
35
32
  def move_metadata_and_content
@@ -47,7 +44,6 @@ module Dor
47
44
  primary_cm = @primary.contentMetadata.ng_xml
48
45
 
49
46
  @secondary_objs.each do |secondary|
50
-
51
47
  sec_druid = DruidTools::Druid.new secondary.pid, Dor::Config.stacks.local_workspace_root
52
48
  secondary.contentMetadata.ng_xml.xpath("//resource").each do |src_resource|
53
49
  primary_resource = primary_cm.at_xpath "//resource[attr[@name = 'mergedFromPid']/text() = '#{secondary.pid}' and
@@ -13,7 +13,7 @@ handler = Class.new do
13
13
  end
14
14
 
15
15
  def prefixes
16
- ['catkey','barcode']
16
+ %w(catkey barcode)
17
17
  end
18
18
  end
19
19
 
@@ -21,20 +21,18 @@ handler = Class.new do
21
21
 
22
22
  def label(metadata)
23
23
  xml = Nokogiri::XML(metadata)
24
- if xml.root.nil?
25
- return ""
26
- end
24
+ return "" if xml.root.nil?
27
25
  case xml.root.name
28
26
  when 'msDesc' then xml.xpath('/msDesc/msIdentifier/collection').text
29
- when 'mods' then
27
+ when 'mods' then
30
28
  xml.root.add_namespace_definition('mods','http://www.loc.gov/mods/v3')
31
29
  xml.xpath('/mods:mods/mods:titleInfo[1]').xpath('mods:title|mods:nonSort').collect { |n| n.text }.join(' ').strip
32
30
  end
33
31
  end
34
32
 
35
33
  def prefixes
36
- ['mdtoolkit','druid']
34
+ %w(mdtoolkit druid)
37
35
  end
38
36
  end
39
37
 
40
- Dor::MetadataService.register(handler)
38
+ Dor::MetadataService.register(handler)
@@ -3,28 +3,28 @@ require 'cache'
3
3
  module Dor
4
4
 
5
5
  class MetadataError < Exception ; end
6
-
6
+
7
7
  # class MetadataHandler
8
- #
8
+ #
9
9
  # def fetch(prefix, identifier)
10
10
  # ### Return metadata for prefix/identifier combo
11
11
  # end
12
- #
12
+ #
13
13
  # def label(metadata)
14
14
  # ### Return a Fedora-compatible label from the metadata format returned by #fetch
15
15
  # end
16
- #
16
+ #
17
17
  # end
18
-
18
+
19
19
  class MetadataService
20
-
20
+
21
21
  class << self
22
22
  @@cache = Cache.new(nil, nil, 250, 300)
23
-
23
+
24
24
  def register(handler_class)
25
- ['fetch', 'label', 'prefixes'].each do |method|
26
- unless handler_class.instance_methods.include?(method) || handler_class.instance_methods.include?(method.to_sym)
27
- raise TypeError, "Metadata handlers must define ##{method.to_s}"
25
+ %w(fetch label prefixes).each do |method|
26
+ unless handler_class.instance_methods.include?(method) || handler_class.instance_methods.include?(method.to_sym)
27
+ raise TypeError, "Metadata handlers must define ##{method}"
28
28
  end
29
29
  end
30
30
  handler = handler_class.new
@@ -33,21 +33,21 @@ module Dor
33
33
  end
34
34
  return handler
35
35
  end
36
-
36
+
37
37
  def known_prefixes
38
38
  return handlers.keys
39
39
  end
40
-
40
+
41
41
  def can_resolve?(identifier)
42
42
  (prefix, identifier) = identifier.split(/:/,2)
43
43
  handlers.keys.include?(prefix.to_sym)
44
44
  end
45
-
45
+
46
46
  # TODO: Return a prioritized list
47
47
  def resolvable(identifiers)
48
48
  identifiers.select { |identifier| self.can_resolve?(identifier) }
49
49
  end
50
-
50
+
51
51
  def fetch(identifier)
52
52
  @@cache.fetch(identifier) do
53
53
  (prefix, identifier) = identifier.split(/:/,2)
@@ -61,24 +61,22 @@ module Dor
61
61
  handler = handler_for(prefix)
62
62
  handler.label(handler.fetch(prefix, identifier))
63
63
  end
64
-
64
+
65
65
  def handler_for(prefix)
66
66
  handler = handlers[prefix.to_sym]
67
- if handler.nil?
68
- raise MetadataError, "Unkown metadata prefix: #{prefix}"
69
- end
67
+ raise MetadataError, "Unkown metadata prefix: #{prefix}" if handler.nil?
70
68
  return handler
71
69
  end
72
-
70
+
73
71
  private
74
72
  def handlers
75
73
  @handlers ||= {}
76
74
  end
77
-
75
+
78
76
  end
79
-
77
+
80
78
  end
81
-
79
+
82
80
  end
83
81
 
84
82
  Dir[File.join(File.dirname(__FILE__),'metadata_handlers','*.rb')].each { |handler_file|