dor-services 4.22.3 → 4.22.4

This diff shows the changes between two publicly released versions of the package, as they appear in its public registry. It is provided for informational purposes only.
Files changed (71)
  1. checksums.yaml +8 -8
  2. data/bin/dor-indexer +19 -20
  3. data/bin/dor-indexerd +2 -3
  4. data/config/certs/robots-dor-dev.crt +29 -0
  5. data/config/certs/robots-dor-dev.key +27 -0
  6. data/config/dev_console_env.rb +77 -0
  7. data/lib/dor-services.rb +3 -3
  8. data/lib/dor/config.rb +15 -15
  9. data/lib/dor/datastreams/administrative_metadata_ds.rb +5 -5
  10. data/lib/dor/datastreams/content_metadata_ds.rb +181 -225
  11. data/lib/dor/datastreams/datastream_spec_solrizer.rb +1 -1
  12. data/lib/dor/datastreams/default_object_rights_ds.rb +8 -10
  13. data/lib/dor/datastreams/desc_metadata_ds.rb +35 -34
  14. data/lib/dor/datastreams/embargo_metadata_ds.rb +7 -7
  15. data/lib/dor/datastreams/events_ds.rb +11 -11
  16. data/lib/dor/datastreams/geo_metadata_ds.rb +86 -86
  17. data/lib/dor/datastreams/identity_metadata_ds.rb +19 -19
  18. data/lib/dor/datastreams/role_metadata_ds.rb +3 -3
  19. data/lib/dor/datastreams/simple_dublin_core_ds.rb +13 -13
  20. data/lib/dor/datastreams/version_metadata_ds.rb +5 -5
  21. data/lib/dor/datastreams/workflow_definition_ds.rb +21 -21
  22. data/lib/dor/migrations/identifiable/assert_adminPolicy.rb +1 -1
  23. data/lib/dor/migrations/identifiable/fix_model_assertions.rb +1 -1
  24. data/lib/dor/migrations/identifiable/record_remediation.rb +2 -2
  25. data/lib/dor/migrations/identifiable/uriify_augmented_contentlocation_refs.rb +1 -1
  26. data/lib/dor/migrations/identifiable/uriify_contentlocation_refs.rb +1 -1
  27. data/lib/dor/migrations/processable/unify_workflows.rb +4 -4
  28. data/lib/dor/migrations/versionable/add_missing_version_md.rb +1 -1
  29. data/lib/dor/models/admin_policy_object.rb +1 -1
  30. data/lib/dor/models/assembleable.rb +5 -5
  31. data/lib/dor/models/contentable.rb +27 -27
  32. data/lib/dor/models/describable.rb +168 -179
  33. data/lib/dor/models/discoverable.rb +13 -13
  34. data/lib/dor/models/editable.rb +55 -55
  35. data/lib/dor/models/embargoable.rb +26 -26
  36. data/lib/dor/models/eventable.rb +3 -3
  37. data/lib/dor/models/geoable.rb +8 -8
  38. data/lib/dor/models/governable.rb +14 -14
  39. data/lib/dor/models/identifiable.rb +117 -143
  40. data/lib/dor/models/item.rb +2 -2
  41. data/lib/dor/models/itemizable.rb +9 -9
  42. data/lib/dor/models/presentable.rb +8 -8
  43. data/lib/dor/models/preservable.rb +4 -4
  44. data/lib/dor/models/processable.rb +22 -23
  45. data/lib/dor/models/releaseable.rb +26 -26
  46. data/lib/dor/models/shelvable.rb +14 -14
  47. data/lib/dor/models/upgradable.rb +13 -13
  48. data/lib/dor/models/versionable.rb +2 -2
  49. data/lib/dor/models/workflow_object.rb +4 -4
  50. data/lib/dor/services/cleanup_reset_service.rb +27 -27
  51. data/lib/dor/services/cleanup_service.rb +4 -7
  52. data/lib/dor/services/digital_stacks_service.rb +10 -10
  53. data/lib/dor/services/merge_service.rb +1 -1
  54. data/lib/dor/services/metadata_handlers/mdtoolkit_handler.rb +2 -2
  55. data/lib/dor/services/metadata_service.rb +20 -20
  56. data/lib/dor/services/registration_service.rb +27 -27
  57. data/lib/dor/services/reset_workspace_service.rb +15 -15
  58. data/lib/dor/services/sdr_ingest_service.rb +6 -6
  59. data/lib/dor/services/search_service.rb +2 -2
  60. data/lib/dor/services/suri_service.rb +5 -5
  61. data/lib/dor/services/technical_metadata_service.rb +2 -3
  62. data/lib/dor/utils/ng_tidy.rb +9 -9
  63. data/lib/dor/utils/predicate_patch.rb +1 -1
  64. data/lib/dor/utils/solr_doc_helper.rb +2 -2
  65. data/lib/dor/version.rb +1 -1
  66. data/lib/dor/workflow/document.rb +19 -19
  67. data/lib/dor/workflow/graph.rb +36 -36
  68. data/lib/dor/workflow/process.rb +12 -12
  69. data/lib/tasks/dor.rake +1 -1
  70. data/lib/tasks/rdoc.rake +3 -3
  71. metadata +6 -3
@@ -63,7 +63,7 @@ module Dor
  # @option opts [Boolean] :start_accesion set to true if you want accessioning to start (default), false otherwise
  # @raise [Dor::Exception] if the object hasn't been opened for versioning, or if accessionWF has
  # already been instantiated or the current version is missing a tag or description
- def close_version(opts={})
+ def close_version(opts = {})
  unless(opts.empty?)
  datastreams['versionMetadata'].update_current_version opts
  datastreams['versionMetadata'].save
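The only change in this hunk is argument-list spacing; the documented behavior of close_version is untouched. For orientation, a minimal usage sketch (the :description and :significance keys passed through to update_current_version are assumptions for illustration, not taken from this diff):

    # Hypothetical caller; the option keys shown are illustrative assumptions.
    item = Dor::Item.find('druid:ab123cd4567')
    item.close_version(:description  => 'corrected descMetadata title',
                       :significance => :minor)
    # Raises Dor::Exception if no version is open or required metadata is missing.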
@@ -84,7 +84,7 @@ module Dor

  # @return [Boolean] true if the object is in a state that allows it to be modified. States that will allow modification are: has not been submitted for accessioning, has an open version or has sdr-ingest set to hold
  def allows_modification?
- if Dor::WorkflowService.get_lifecycle('dor', pid, 'submitted' ) and not new_version_open? and not Dor::WorkflowService.get_workflow_status('dor', pid, 'accessionWF', 'sdr-ingest-transfer')=='hold'
+ if Dor::WorkflowService.get_lifecycle('dor', pid, 'submitted' ) && !new_version_open? && Dor::WorkflowService.get_workflow_status('dor', pid, 'accessionWF', 'sdr-ingest-transfer') != 'hold'
  false
  else
  true
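The and/not to &&/! rewrite in this hunk is stylistic: inside the if condition the two operator families differ only in precedence, and "not x == y" already parses as "not (x == y)", which is "x != y". A standalone check of that equivalence in plain Ruby, independent of the gem:

    # Compare the old and new condition styles over representative inputs.
    def old_style(submitted, open_version, status)
      submitted and not open_version and not status == 'hold'
    end

    def new_style(submitted, open_version, status)
      submitted && !open_version && status != 'hold'
    end

    [true, false].product([true, false], ['hold', 'waiting']).each do |submitted, open_version, status|
      raise 'mismatch' unless old_style(submitted, open_version, status) == new_style(submitted, open_version, status)
    end
    puts 'old and new conditions agree'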
@@ -12,7 +12,7 @@ module Dor
  has_metadata :name => "workflowDefinition", :type => Dor::WorkflowDefinitionDs, :label => 'Workflow Definition'

  def self.find_by_name(name, opts = {})
- Dor.find_all(%{objectType_t:"#{self.object_type}" workflow_name_s:"#{name}"}, opts).first
+ Dor.find_all(%{objectType_t:"#{object_type}" workflow_name_s:"#{name}"}, opts).first
  end

  # Searches for the workflow definition object in DOR, then
@@ -22,7 +22,7 @@ module Dor
  # @return [String] the initial workflow xml
  def self.initial_workflow(name)
  return @@xml_cache[name] if @@xml_cache.include?(name)
- self.find_and_cache_workflow_xml_and_repo name
+ find_and_cache_workflow_xml_and_repo name
  @@xml_cache[name]
  end

@@ -32,7 +32,7 @@ module Dor
  # @return [String] the initial workflow xml
  def self.initial_repo(name)
  return @@repo_cache[name] if @@repo_cache.include?(name)
- self.find_and_cache_workflow_xml_and_repo name
+ find_and_cache_workflow_xml_and_repo name
  @@repo_cache[name]
  end

@@ -41,7 +41,7 @@ module Dor
  end

  def graph(*args)
- self.definition.graph *args
+ definition.graph *args
  end

  def to_solr(solr_doc = {}, *args)
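The two cache hunks above (initial_workflow and initial_repo) drop the redundant self. receiver but keep the same shape: consult a class-level hash, populate it once via find_and_cache_workflow_xml_and_repo, then read the memoized value. A generic sketch of that shape, with an invented loader standing in for the DOR lookup:

    # Illustrative class-level memoization mirroring @@xml_cache / @@repo_cache.
    class WorkflowCacheSketch
      @@xml_cache = {}

      def self.initial_workflow(name)
        return @@xml_cache[name] if @@xml_cache.include?(name)
        load_and_cache(name)   # stand-in for find_and_cache_workflow_xml_and_repo
        @@xml_cache[name]
      end

      def self.load_and_cache(name)
        # The gem queries DOR for the workflow definition object; fake it here.
        @@xml_cache[name] = %{<workflow id="#{name}"/>}
      end
    end

    puts WorkflowCacheSketch.initial_workflow('accessionWF')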
@@ -4,27 +4,27 @@ module Dor

  # Remove all traces of the object's data files from the workspace and export areas
  class CleanupResetService
-
+
  # @param [String] druid The identifier for the object whose reset data is to be removed
  # @return [void] remove copy of the reset data that was exported to preservation core
  def self.cleanup_by_reset_druid(druid)
- last_version = get_druid_last_version(druid)
+ last_version = get_druid_last_version(druid)
  cleanup_reset_workspace_content(druid, last_version, Config.cleanup.local_workspace_root)
  cleanup_assembly_content(druid, Config.cleanup.local_assembly_root)
  cleanup_reset_export(druid, last_version)
  end
-
+
  def self.get_druid_last_version(druid)
  druid_obj = Dor::Item.find(druid)
  last_version = druid_obj.current_version.to_i
-
+
  #if the current version is still open, avoid this versioned directory
- if Dor::WorkflowService.get_lifecycle('dor', druid, 'accessioned').nil? then
- last_version = last_version - 1
+ if Dor::WorkflowService.get_lifecycle('dor', druid, 'accessioned').nil? then
+ last_version -= 1
  end
- return last_version
+ last_version
  end
-
+
  # @param [String] druid The identifier for the object whose reset data is to be removed
  # @param [String] base The base directory to delete from
  # @param [Integer] last_version The last version that the data should be removed until version 1
@@ -32,28 +32,28 @@ module Dor
  def self.cleanup_reset_workspace_content(druid,last_version, base)
  base_druid = DruidTools::Druid.new(druid, base)
  base_druid_tree = base_druid.pathname.to_s
- #if it is truncated tree /aa/111/aaa/1111/content,
+ #if it is truncated tree /aa/111/aaa/1111/content,
  #we should follow the regular cleanup technique

  reset_directories = get_reset_dir_list(last_version, base_druid_tree)
- reset_directories.each do |path|
+ reset_directories.each do |path|
  FileUtils.rm_rf(path)
  end
  base_druid.prune_ancestors(base_druid.pathname.parent)
  end
-
-
+
+
  # @param [String] druid The identifier for the object whose reset data is to be removed
  # @param [String] base The base directory to delete from
  # @param [Integer] last_version The last version that the data should be removed until version 1
  # @return [void] prepares a list of reset directories that should be removed
  def self.get_reset_dir_list(last_version, base_druid_tree)
- reset_directories = []
- for i in 1..last_version
+ reset_directories = []
+ for i in 1..last_version
  reset_path = "#{base_druid_tree}_v#{i}"
- reset_directories.append(reset_path) if File.exists?(reset_path)
+ reset_directories.append(reset_path) if File.exists?(reset_path)
  end
- return reset_directories
+ reset_directories
  end

  # @param [String] druid The identifier for the object whose reset bags data is to be removed
@@ -61,40 +61,40 @@ module Dor
  def self.cleanup_reset_export(druid, last_version)
  id = druid.split(':').last
  base_bag_directory = File.join(Config.cleanup.local_export_home, id)
-
+
  bag_dir_list = get_reset_bag_dir_list(last_version, base_bag_directory)
- bag_dir_list.each do |bag_dir|
+ bag_dir_list.each do |bag_dir|
  Pathname(bag_dir).rmtree
  end
-
+
  bag_tar_list = get_reset_bag_tar_list(last_version, base_bag_directory)
- bag_tar_list.each do |bag_tar|
+ bag_tar_list.each do |bag_tar|
  Pathname(bag_tar).rmtree
  end
  end
-
+
  # @param [Integer] last_version The last version that the data should be removed until version 1
  # @param [String] base_bag_directory The base bag directory including the export home and druid id
  # @return [void] prepares a list of reset bag directories that should be removed
  def self.get_reset_bag_dir_list(last_version, base_bag_directory)
- reset_bags = []
+ reset_bags = []
  for i in 1..last_version do
  reset_path = "#{base_bag_directory}_v#{i}"
- reset_bags.append(reset_path) if File.exists?(reset_path)
+ reset_bags.append(reset_path) if File.exists?(reset_path)
  end
- return reset_bags
+ reset_bags
  end

  # @param [String] base_bag_directory The base bag directory including the export home and druid id
  # @param [Integer] last_version The last version that the data should be removed until version 1
  # @return [void] prepares a list of reset bag tars that should be removed
  def self.get_reset_bag_tar_list(last_version, base_bag_directory)
- reset_bags = []
+ reset_bags = []
  for i in 1..last_version do
  reset_path = "#{base_bag_directory}_v#{i}.tar"
- reset_bags.append(reset_path) if File.exists?(reset_path)
+ reset_bags.append(reset_path) if File.exists?(reset_path)
  end
- return reset_bags
+ reset_bags
  end

  # @param [String] druid The identifier for the object whose data is to be removed
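get_reset_dir_list and the two bag-list helpers above rely on the same naming convention: reset copies sit next to the original path with a _v<N> (or _v<N>.tar) suffix, so cleanup walks versions 1..last_version and keeps only the paths that actually exist. A self-contained sketch of that enumeration (it uses File.exist?; the gem still calls the older File.exists? alias, and the example paths in the comments are made up):

    # Illustrative version of the _v<N> path enumeration used by the cleanup helpers.
    def reset_paths(base, last_version, suffix = '')
      (1..last_version).map { |i| "#{base}_v#{i}#{suffix}" }
                       .select { |path| File.exist?(path) }
    end

    # reset_paths('/dor/workspace/ab/123/cd/4567/ab123cd4567', 3)   # workspace trees
    # reset_paths('/dor/export/ab123cd4567', 3, '.tar')             # exported tars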
@@ -24,15 +24,15 @@ module Dor
  def self.cleanup_workspace_content(druid, base)
  DruidTools::Druid.new(druid, base).prune!
  end
-
+
  # @param [String] druid The identifier for the object whose data is to be removed
  # @return [void] remove copy of the data that was exported to preservation core
  def self.cleanup_export(druid)
  id = druid.split(':').last
  bag_dir = File.join(Config.cleanup.local_export_home, id)
- self.remove_branch(bag_dir)
+ remove_branch(bag_dir)
  tarfile = "#{bag_dir}.tar"
- self.remove_branch(tarfile)
+ remove_branch(tarfile)
  end

  # @param [Pathname,String] pathname The full path of the branch to be removed
@@ -83,9 +83,6 @@ module Dor
  Dor::SearchService.solr.delete_by_id(pid)
  Dor::SearchService.solr.commit
  end
- end
+ end

  end
-
-
-
@@ -14,7 +14,7 @@ module Dor
  subset.files.each do |moab_file| # {Moab::FileInstanceDifference}
  moab_signature = moab_file.signatures.first # {Moab::FileSignature}
  file_pathname = stacks_object_pathname.join(moab_file.basis_path)
- self.delete_file(file_pathname, moab_signature)
+ delete_file(file_pathname, moab_signature)
  end
  end
  end
@@ -24,14 +24,14 @@ module Dor
  # @param [Moab::FileSignature] moab_signature The fixity values of the file
  # @return [Boolean] true if file deleted, false otherwise
  def self.delete_file(file_pathname, moab_signature)
- if file_pathname.exist? and (file_pathname.size == moab_signature.size)
+ if file_pathname.exist? && (file_pathname.size == moab_signature.size)
  file_signature = Moab::FileSignature.new.signature_from_file(file_pathname)
  if (file_signature == moab_signature)
  file_pathname.delete
  return true
  end
  end
- return false
+ false
  end

  # Rename files from stacks that have change type 'renamed' using an intermediate temp filename.
@@ -46,7 +46,7 @@ module Dor
  moab_signature = moab_file.signatures.first # {Moab::FileSignature}
  original_pathname = stacks_object_pathname.join(moab_file.basis_path)
  temp_pathname = stacks_object_pathname.join(moab_signature.checksums.values.last)
- self.rename_file(original_pathname, temp_pathname, moab_signature)
+ rename_file(original_pathname, temp_pathname, moab_signature)
  end

  # 2nd Pass - rename files from checksum-based name to new name
@@ -54,7 +54,7 @@ module Dor
  moab_signature = moab_file.signatures.first # {Moab::FileSignature}
  temp_pathname = stacks_object_pathname.join(moab_signature.checksums.values.last)
  new_pathname = stacks_object_pathname.join(moab_file.other_path)
- self.rename_file(temp_pathname, new_pathname, moab_signature)
+ rename_file(temp_pathname, new_pathname, moab_signature)
  end

  end
@@ -65,7 +65,7 @@ module Dor
  # @param [Moab::FileSignature] moab_signature The fixity values of the file
  # @return [Boolean] true if file renamed, false otherwise
  def self.rename_file(old_pathname, new_pathname, moab_signature)
- if old_pathname.exist? and (old_pathname.size == moab_signature.size)
+ if old_pathname.exist? && (old_pathname.size == moab_signature.size)
  file_signature = Moab::FileSignature.new.signature_from_file(old_pathname)
  if (file_signature == moab_signature)
  new_pathname.parent.mkpath
@@ -73,7 +73,7 @@ module Dor
  return true
  end
  end
- return false
+ false
  end

  # Add files to stacks that have change type 'added', 'copyadded' or 'modified'.
@@ -82,14 +82,14 @@ module Dor
  # @param [Moab::FileGroupDifference] content_diff the content file version differences report
  def self.shelve_to_stacks(workspace_content_pathname, stacks_object_pathname, content_diff)
  return false if workspace_content_pathname.nil?
- [:added, :copyadded, :modified,].each do |change_type|
+ [:added, :copyadded, :modified].each do |change_type|
  subset = content_diff.subset(change_type) # {Moab::FileGroupDifferenceSubset
  subset.files.each do |moab_file| # {Moab::FileInstanceDifference}
  moab_signature = moab_file.signatures.last # {Moab::FileSignature}
  filename = (change_type == :modified) ? moab_file.basis_path : moab_file.other_path
  workspace_pathname = workspace_content_pathname.join(filename)
  stacks_pathname = stacks_object_pathname.join(filename)
- self.copy_file(workspace_pathname, stacks_pathname, moab_signature)
+ copy_file(workspace_pathname, stacks_pathname, moab_signature)
  end
  end
  true
@@ -110,7 +110,7 @@ module Dor
  FileUtils.cp workspace_pathname.to_s, stacks_pathname.to_s
  return true
  end
- return false
+ false
  end

  ### depricated ???
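delete_file, rename_file and copy_file above all guard the filesystem operation the same way: the on-disk size must match the Moab signature, and the recomputed signature must equal the expected one before anything is deleted, renamed or overwritten. A simplified, self-contained sketch of that guard, with a plain MD5 digest standing in for Moab::FileSignature (an assumption made so the example runs without the Moab library):

    require 'digest'
    require 'pathname'

    # Size check first, then checksum check, then act; mirrors the delete_file guard.
    def delete_if_fixity_matches(pathname, expected_size, expected_md5)
      return false unless pathname.exist? && pathname.size == expected_size
      return false unless Digest::MD5.file(pathname.to_s).hexdigest == expected_md5
      pathname.delete
      true
    end

    # delete_if_fixity_matches(Pathname.new('/stacks/ab/123/cd/4567/image1.tif'),
    #                          1_234_567, 'd41d8cd98f00b204e9800998ecf8427e')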
@@ -27,7 +27,7 @@ module Dor
  unless @primary.allows_modification?
  raise Dor::Exception.new "Primary object is not editable: #{@primary.pid}"
  end
- if ( non_editable = (@secondary_objs.detect {|obj| ! obj.allows_modification? } ))
+ if ( non_editable = (@secondary_objs.detect {|obj| !obj.allows_modification? } ))
  raise Dor::Exception.new "Secondary object is not editable: #{non_editable.pid}"
  end
  end
@@ -26,7 +26,7 @@ handler = Class.new do
  end
  case xml.root.name
  when 'msDesc' then xml.xpath('/msDesc/msIdentifier/collection').text
- when 'mods' then
+ when 'mods' then
  xml.root.add_namespace_definition('mods','http://www.loc.gov/mods/v3')
  xml.xpath('/mods:mods/mods:titleInfo[1]').xpath('mods:title|mods:nonSort').collect { |n| n.text }.join(' ').strip
  end
@@ -37,4 +37,4 @@ handler = Class.new do
  end
  end

- Dor::MetadataService.register(handler)
+ Dor::MetadataService.register(handler)
@@ -3,27 +3,27 @@ require 'cache'
  module Dor

  class MetadataError < Exception ; end
-
+
  # class MetadataHandler
- #
+ #
  # def fetch(prefix, identifier)
  # ### Return metadata for prefix/identifier combo
  # end
- #
+ #
  # def label(metadata)
  # ### Return a Fedora-compatible label from the metadata format returned by #fetch
  # end
- #
+ #
  # end
-
+
  class MetadataService
-
+
  class << self
  @@cache = Cache.new(nil, nil, 250, 300)
-
+
  def register(handler_class)
  ['fetch', 'label', 'prefixes'].each do |method|
- unless handler_class.instance_methods.include?(method) or handler_class.instance_methods.include?(method.to_sym)
+ unless handler_class.instance_methods.include?(method) or handler_class.instance_methods.include?(method.to_sym)
  raise TypeError, "Metadata handlers must define ##{method.to_s}"
  end
  end
@@ -31,23 +31,23 @@ module Dor
  handler.prefixes.each do |prefix|
  handlers[prefix.to_sym] = handler
  end
- return handler
+ handler
  end
-
+
  def known_prefixes
- return handlers.keys
+ handlers.keys
  end
-
+
  def can_resolve?(identifier)
  (prefix, identifier) = identifier.split(/:/,2)
  handlers.keys.include?(prefix.to_sym)
  end
-
+
  # TODO: Return a prioritized list
  def resolvable(identifiers)
  identifiers.select { |identifier| self.can_resolve?(identifier) }
  end
-
+
  def fetch(identifier)
  @@cache.fetch(identifier) do
  (prefix, identifier) = identifier.split(/:/,2)
@@ -61,24 +61,24 @@ module Dor
  handler = handler_for(prefix)
  handler.label(handler.fetch(prefix, identifier))
  end
-
+
  def handler_for(prefix)
  handler = handlers[prefix.to_sym]
  if handler.nil?
  raise MetadataError, "Unkown metadata prefix: #{prefix}"
  end
- return handler
+ handler
  end
-
+
  private
  def handlers
  @handlers ||= {}
  end
-
+
  end
-
+
  end
-
+
  end

  Dir[File.join(File.dirname(__FILE__),'metadata_handlers','*.rb')].each { |handler_file|
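register above only checks that the handler class defines fetch, label and prefixes, then registers it under each prefix it reports; the mdtoolkit handler earlier in this diff satisfies the same contract via Class.new. A minimal conforming handler as a sketch (the 'example' prefix and the return values are invented for illustration):

    # Minimal handler satisfying the fetch/label/prefixes contract enforced by #register.
    class ExampleHandler
      def prefixes
        ['example']                        # identifiers look like "example:12345"
      end

      def fetch(prefix, identifier)
        %{<record id="#{identifier}"/>}    # a real handler returns source metadata here
      end

      def label(metadata)
        'Example label derived from metadata'
      end
    end

    # Dor::MetadataService.register(ExampleHandler)
    # Dor::MetadataService.fetch('example:12345')   # results cached per identifier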
@@ -1,23 +1,23 @@
  require 'uuidtools'

  module Dor
-
+
  class RegistrationService
-
+
  class << self
  def register_object(params = {})
  Dor.ensure_models_loaded!
  [:object_type, :label].each do |required_param|
- raise Dor::ParameterError, "#{required_param.inspect} must be specified in call to #{self.name}.register_object" unless params[required_param]
+ raise Dor::ParameterError, "#{required_param.inspect} must be specified in call to #{name}.register_object" unless params[required_param]
  end
  metadata_source=params[:metadata_source]
- if params[:label].length<1 and (metadata_source=='label' || metadata_source=='none')
- raise Dor::ParameterError, "label cannot be empty to call #{self.name}.register_object"
+ if params[:label].length<1 && (metadata_source=='label' || metadata_source=='none')
+ raise Dor::ParameterError, "label cannot be empty to call #{name}.register_object"
  end
- object_type = params[:object_type]
+ object_type = params[:object_type]
  item_class = Dor.registered_classes[object_type]
  raise Dor::ParameterError, "Unknown item type: '#{object_type}'" if item_class.nil?
-
+
  content_model = params[:content_model]
  admin_policy = params[:admin_policy]
  label = params[:label]
@@ -37,12 +37,12 @@ module Dor
  else
  pid = Dor::SuriService.mint_id
  end
-
+
  rights=nil
  if params[:rights]
  rights=params[:rights]
- if not ['world','stanford','dark','default','none'].include? rights
- raise Dor::ParameterError,"Unknown rights setting" + rights + "when calling #{self.name}.register_object"
+ unless ['world','stanford','dark','default','none'].include? rights
+ raise Dor::ParameterError,"Unknown rights setting" + rights + "when calling #{name}.register_object"
  end
  end

@@ -54,17 +54,17 @@ module Dor
  end
  end

- if (other_ids.has_key?(:uuid) or other_ids.has_key?('uuid')) == false
+ if (other_ids.has_key?(:uuid) || other_ids.has_key?('uuid')) == false
  other_ids[:uuid] = UUIDTools::UUID.timestamp_create.to_s
  end
  short_label=label
  if label.length>254
  short_label=label[0,254]
  end
-
+
  apo_object = Dor.find(admin_policy, :lightweight => true)
  adm_xml = apo_object.administrativeMetadata.ng_xml
-
+
  new_item = item_class.new(:pid => pid)
  new_item.label = short_label
  idmd = new_item.identityMetadata
@@ -76,7 +76,7 @@ module Dor
  other_ids.each_pair { |name,value| idmd.add_otherId("#{name}:#{value}") }
  tags.each { |tag| idmd.add_value(:tag, tag) }
  new_item.admin_policy_object = apo_object
-
+
  adm_xml.xpath('/administrativeMetadata/relationships/*').each do |rel|
  short_predicate = ActiveFedora::RelsExtDatastream.short_predicate rel.namespace.href+rel.name
  if short_predicate.nil?
@@ -89,7 +89,7 @@ module Dor
  if collection
  new_item.add_collection(collection)
  end
- if(rights and ['item','collection'].include? object_type )
+ if(rights && ['item','collection'].include?(object_type) )
  rights_xml=apo_object.defaultObjectRights.ng_xml
  new_item.datastreams['rightsMetadata'].content=rights_xml.to_s
  new_item.set_read_rights(rights)
@@ -104,13 +104,13 @@ module Dor
  }
  }
  }
-
+
  ds.content=builder.to_xml
-
+
  end
-
+
  workflow_priority = params[:workflow_priority] ? params[:workflow_priority].to_i : 0
-
+
  Array(params[:seed_datastream]).each { |datastream_name| new_item.build_datastream(datastream_name) }
  Array(params[:initiate_workflow]).each { |workflow_id| new_item.initialize_workflow(workflow_id, !new_item.new_object?, workflow_priority)}

@@ -121,9 +121,9 @@ module Dor
  rescue StandardError => e
  Dor.logger.warn "Dor::RegistrationService.register_object failed to update solr index for #{new_item.pid}: #<#{e.class.name}: #{e.message}>"
  end
- return(new_item)
+ (new_item)
  end
-
+
  def create_from_request(params)
  other_ids = Array(params[:other_id]).collect do |id|
  if id =~ /^symphony:(.+)$/
@@ -132,14 +132,14 @@ module Dor
  id
  end
  end
-
+
  if params[:label] == ':auto'
  params.delete(:label)
  params.delete('label')
  metadata_id = Dor::MetadataService.resolvable(other_ids).first
  params[:label] = Dor::MetadataService.label_for(metadata_id)
  end
-
+
  dor_params = {
  :pid => params[:pid],
  :admin_policy => params[:admin_policy],
@@ -158,13 +158,13 @@ module Dor
  :workflow_priority => params[:workflow_priority]
  }
  dor_params.delete_if { |k,v| v.nil? }
-
- dor_obj = self.register_object(dor_params)
+
+ dor_obj = register_object(dor_params)
  pid = dor_obj.pid
  location = URI.parse(Dor::Config.fedora.safeurl.sub(/\/*$/,'/')).merge("objects/#{pid}").to_s
  reg_response = dor_params.dup.merge({ :location => location, :pid => pid })
  end
-
+
  private
  def ids_to_hash(ids)
  if ids.nil?
@@ -174,7 +174,7 @@
  end
  end
  end
-
+
  end
  end
  end
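Taken together, the register_object hunks above show the shape of the call: :object_type and :label are required, :rights (when given) must be one of world, stanford, dark, default or none, a uuid otherId is added when none is supplied, and :seed_datastream, :initiate_workflow and :workflow_priority drive post-registration work. A hedged usage sketch assembled only from parameter names visible in this diff (the druid and other values are placeholders):

    # Illustrative call; values are placeholders, keys come from the hunks above.
    new_item = Dor::RegistrationService.register_object(
      :object_type       => 'item',
      :label             => 'Example object',
      :admin_policy      => 'druid:aa111bb2222',   # APO druid (placeholder)
      :rights            => 'world',               # world/stanford/dark/default/none
      :initiate_workflow => ['accessionWF'],
      :workflow_priority => 0
    )
    # Returns the newly built object for the registered 'item' class, indexed in Solr.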