dor-services 4.22.3 → 4.22.4

This diff shows the changes between publicly released versions of this package as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
Files changed (71)
  1. checksums.yaml +8 -8
  2. data/bin/dor-indexer +19 -20
  3. data/bin/dor-indexerd +2 -3
  4. data/config/certs/robots-dor-dev.crt +29 -0
  5. data/config/certs/robots-dor-dev.key +27 -0
  6. data/config/dev_console_env.rb +77 -0
  7. data/lib/dor-services.rb +3 -3
  8. data/lib/dor/config.rb +15 -15
  9. data/lib/dor/datastreams/administrative_metadata_ds.rb +5 -5
  10. data/lib/dor/datastreams/content_metadata_ds.rb +181 -225
  11. data/lib/dor/datastreams/datastream_spec_solrizer.rb +1 -1
  12. data/lib/dor/datastreams/default_object_rights_ds.rb +8 -10
  13. data/lib/dor/datastreams/desc_metadata_ds.rb +35 -34
  14. data/lib/dor/datastreams/embargo_metadata_ds.rb +7 -7
  15. data/lib/dor/datastreams/events_ds.rb +11 -11
  16. data/lib/dor/datastreams/geo_metadata_ds.rb +86 -86
  17. data/lib/dor/datastreams/identity_metadata_ds.rb +19 -19
  18. data/lib/dor/datastreams/role_metadata_ds.rb +3 -3
  19. data/lib/dor/datastreams/simple_dublin_core_ds.rb +13 -13
  20. data/lib/dor/datastreams/version_metadata_ds.rb +5 -5
  21. data/lib/dor/datastreams/workflow_definition_ds.rb +21 -21
  22. data/lib/dor/migrations/identifiable/assert_adminPolicy.rb +1 -1
  23. data/lib/dor/migrations/identifiable/fix_model_assertions.rb +1 -1
  24. data/lib/dor/migrations/identifiable/record_remediation.rb +2 -2
  25. data/lib/dor/migrations/identifiable/uriify_augmented_contentlocation_refs.rb +1 -1
  26. data/lib/dor/migrations/identifiable/uriify_contentlocation_refs.rb +1 -1
  27. data/lib/dor/migrations/processable/unify_workflows.rb +4 -4
  28. data/lib/dor/migrations/versionable/add_missing_version_md.rb +1 -1
  29. data/lib/dor/models/admin_policy_object.rb +1 -1
  30. data/lib/dor/models/assembleable.rb +5 -5
  31. data/lib/dor/models/contentable.rb +27 -27
  32. data/lib/dor/models/describable.rb +168 -179
  33. data/lib/dor/models/discoverable.rb +13 -13
  34. data/lib/dor/models/editable.rb +55 -55
  35. data/lib/dor/models/embargoable.rb +26 -26
  36. data/lib/dor/models/eventable.rb +3 -3
  37. data/lib/dor/models/geoable.rb +8 -8
  38. data/lib/dor/models/governable.rb +14 -14
  39. data/lib/dor/models/identifiable.rb +117 -143
  40. data/lib/dor/models/item.rb +2 -2
  41. data/lib/dor/models/itemizable.rb +9 -9
  42. data/lib/dor/models/presentable.rb +8 -8
  43. data/lib/dor/models/preservable.rb +4 -4
  44. data/lib/dor/models/processable.rb +22 -23
  45. data/lib/dor/models/releaseable.rb +26 -26
  46. data/lib/dor/models/shelvable.rb +14 -14
  47. data/lib/dor/models/upgradable.rb +13 -13
  48. data/lib/dor/models/versionable.rb +2 -2
  49. data/lib/dor/models/workflow_object.rb +4 -4
  50. data/lib/dor/services/cleanup_reset_service.rb +27 -27
  51. data/lib/dor/services/cleanup_service.rb +4 -7
  52. data/lib/dor/services/digital_stacks_service.rb +10 -10
  53. data/lib/dor/services/merge_service.rb +1 -1
  54. data/lib/dor/services/metadata_handlers/mdtoolkit_handler.rb +2 -2
  55. data/lib/dor/services/metadata_service.rb +20 -20
  56. data/lib/dor/services/registration_service.rb +27 -27
  57. data/lib/dor/services/reset_workspace_service.rb +15 -15
  58. data/lib/dor/services/sdr_ingest_service.rb +6 -6
  59. data/lib/dor/services/search_service.rb +2 -2
  60. data/lib/dor/services/suri_service.rb +5 -5
  61. data/lib/dor/services/technical_metadata_service.rb +2 -3
  62. data/lib/dor/utils/ng_tidy.rb +9 -9
  63. data/lib/dor/utils/predicate_patch.rb +1 -1
  64. data/lib/dor/utils/solr_doc_helper.rb +2 -2
  65. data/lib/dor/version.rb +1 -1
  66. data/lib/dor/workflow/document.rb +19 -19
  67. data/lib/dor/workflow/graph.rb +36 -36
  68. data/lib/dor/workflow/process.rb +12 -12
  69. data/lib/tasks/dor.rake +1 -1
  70. data/lib/tasks/rdoc.rake +3 -3
  71. metadata +6 -3
data/lib/dor/services/reset_workspace_service.rb CHANGED
@@ -4,32 +4,32 @@ module Dor
  class ResetWorkspaceService

  def self.reset_workspace_druid_tree(druid, version, workspace_root)
-
+
  druid_tree_path = DruidTools::Druid.new(druid, workspace_root).pathname.to_s
-
- raise "The archived directory #{druid_tree_path}_v#{version} already existed." if File.exists?("#{druid_tree_path}_v#{version}")
-
- if File.exists?(druid_tree_path)
+
+ raise "The archived directory #{druid_tree_path}_v#{version} already existed." if File.exists?("#{druid_tree_path}_v#{version}")
+
+ if File.exists?(druid_tree_path)
  FileUtils.mv(druid_tree_path, "#{druid_tree_path}_v#{version}")
  end #Else is a truncated tree where we shouldn't do anything

  end

  def self.reset_export_bag(druid, version, export_root)
-
+
  id = druid.split(':').last
  bag_dir = File.join(export_root, id)

- raise "The archived bag #{bag_dir}_v#{version} already existed." if File.exists?("#{bag_dir}_v#{version}")
-
- if File.exists?(bag_dir)
+ raise "The archived bag #{bag_dir}_v#{version} already existed." if File.exists?("#{bag_dir}_v#{version}")
+
+ if File.exists?(bag_dir)
  FileUtils.mv(bag_dir, "#{bag_dir}_v#{version}")
- end
-
- if File.exists?("#{bag_dir}.tar")
+ end
+
+ if File.exists?("#{bag_dir}.tar")
  FileUtils.mv("#{bag_dir}.tar", "#{bag_dir}_v#{version}.tar")
- end
+ end
  end
-
+
  end
- end
+ end
data/lib/dor/services/sdr_ingest_service.rb CHANGED
@@ -6,7 +6,7 @@ module Dor
  # @param [Dor::Item] dor_item The representation of the digital object
  # @param [String] agreement_id depreciated, included for backward compatability with common-accessoning
  # @return [void] Create the moab manifests, export data to a BagIt bag, kick off the SDR ingest workflow
- def self.transfer(dor_item, agreement_id=nil)
+ def self.transfer(dor_item, agreement_id = nil)
  druid = dor_item.pid
  workspace = DruidTools::Druid.new(druid,Dor::Config.sdr.local_workspace_root)
  signature_catalog = get_signature_catalog(druid)
@@ -16,14 +16,14 @@ module Dor
  version_inventory = get_version_inventory(metadata_dir, druid, new_version_id)
  version_addtions = signature_catalog.version_additions(version_inventory)
  content_addtions = version_addtions.group('content')
- if content_addtions.nil? or content_addtions.files.empty?
+ if content_addtions.nil? || content_addtions.files.empty?
  content_dir = nil
  else
  new_file_list = content_addtions.path_list
  content_dir = workspace.find_filelist_parent('content',new_file_list)
  end
  content_group = version_inventory.group('content')
- unless content_group.nil? or content_group.files.empty?
+ unless content_group.nil? || content_group.files.empty?
  signature_catalog.normalize_group_signatures(content_group, content_dir)
  end
  # export the bag (in tar format)
@@ -61,7 +61,7 @@ module Dor
  Config.sdr.datastreams.to_hash.each_pair do |ds_name, required|
  ds_name = ds_name.to_s
  metadata_file = metadata_dir.join("#{ds_name}.xml")
- metadata_string = self.get_datastream_content(dor_item, ds_name, required)
+ metadata_string = get_datastream_content(dor_item, ds_name, required)
  metadata_file.open('w') { |f| f << metadata_string } if metadata_string
  end
  metadata_dir
@@ -74,7 +74,7 @@ module Dor
  # If not found, return nil unless it is a required datastream in which case raise exception
  def self.get_datastream_content(dor_item, ds_name, required)
  ds = (ds_name == 'relationshipMetadata' ? 'RELS-EXT' : ds_name)
- if dor_item.datastreams.keys.include?(ds) and not dor_item.datastreams[ds].new?
+ if dor_item.datastreams.keys.include?(ds) && !dor_item.datastreams[ds].new?
  return dor_item.datastreams[ds].content
  elsif (required == 'optional')
  return nil
@@ -175,4 +175,4 @@ module Dor

  end

- end
+ end
data/lib/dor/services/search_service.rb CHANGED
@@ -73,7 +73,7 @@ module Dor
  result = JSON.parse(client["select?#{query_string}"].get)
  end

- def query query, args={}
+ def query query, args = {}
  params = args.merge({ :q => query })
  params[:start] ||= 0
  resp = solr.find params
@@ -128,4 +128,4 @@ module Dor

  end

- end
+ end
data/lib/dor/services/suri_service.rb CHANGED
@@ -6,7 +6,7 @@ module Dor
  # If Dor::Config.suri.mint_ids is set to true, then this method
  # returns Config.suri.id_namespace:id_from_suri
  # Throws an exception if there were any problems
- def self.mint_id quantity=nil
+ def self.mint_id quantity = nil
  want_array = quantity.is_a?(Numeric)
  quantity = 1 if quantity.nil?
  ids = []
@@ -20,13 +20,13 @@ module Dor
  resp = Nokogiri::XML(repo.next_pid :numPIDs => quantity)
  ids = resp.xpath('/pidList/pid').collect { |node| node.text }
  end
- return want_array ? ids : ids.first
+ want_array ? ids : ids.first

  # rescue Exception => e
  # Rails.logger.error("Unable to mint id from suri: #{e.to_s}")
  # raise e
  end
-
-
+
+
  end
- end
+ end
data/lib/dor/services/technical_metadata_service.rb CHANGED
@@ -99,7 +99,7 @@ module Dor
  # The data is updated to the latest format.
  def self.get_dor_technical_metadata(dor_item)
  ds = "technicalMetadata"
- if dor_item.datastreams.keys.include?(ds) and not dor_item.datastreams[ds].new?
+ if dor_item.datastreams.keys.include?(ds) && !dor_item.datastreams[ds].new?
  dor_techmd = dor_item.datastreams[ds].content
  else
  return nil
@@ -127,7 +127,7 @@ module Dor
  # @param [Array<String>] new_files The list of filenames for files that are either added or modifed since the previous version
  # @return [String] The technicalMetadata datastream for the new files of the new digital object version
  def self.get_new_technical_metadata(druid, new_files)
- return nil if new_files.nil? or new_files.empty?
+ return nil if new_files.nil? || new_files.empty?
  workspace = DruidTools::Druid.new(druid, Dor::Config.sdr.local_workspace_root)
  content_dir = workspace.find_filelist_parent('content',new_files)
  temp_dir = workspace.temp_dir
@@ -223,4 +223,3 @@ module Dor
  end

  end
-
data/lib/dor/utils/ng_tidy.rb CHANGED
@@ -1,11 +1,11 @@
  class Nokogiri::XML::Text
-
+
  def normalize
- self.content =~ /\S/ ? self.content.gsub(/\s+/,' ').strip : self.content
+ content =~ /\S/ ? content.gsub(/\s+/,' ').strip : content
  end
-
+
  def normalize!
- self.content = self.normalize
+ self.content = normalize
  end

  end
@@ -13,13 +13,13 @@ end
  class Nokogiri::XML::Node

  def normalize_text!
- self.xpath('//text()').each { |t| t.normalize! }
+ xpath('//text()').each { |t| t.normalize! }
  end
-
+
  end

  class Nokogiri::XML::Document
-
+
  def prettify
  xslt = Nokogiri::XSLT <<-EOC
  <xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
@@ -33,5 +33,5 @@ class Nokogiri::XML::Document
  EOC
  xslt.transform(self).to_xml
  end
-
- end
+
+ end
data/lib/dor/utils/predicate_patch.rb CHANGED
@@ -20,4 +20,4 @@ module ActiveFedora
  end
  end
  end
- end
+ end
data/lib/dor/utils/solr_doc_helper.rb CHANGED
@@ -1,9 +1,9 @@
  module SolrDocHelper
-
+
  def add_solr_value(solr_doc, field_name, value, field_type = :default, index_types = [:searchable])
  index_types.each { |index_type|
  ::Solrizer::Extractor.insert_solr_field_value(solr_doc, ::ActiveFedora::SolrService.solr_name(field_name, field_type, index_type), value)
  }
  end
-
+
  end
data/lib/dor/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Dor
- VERSION = '4.22.3'
+ VERSION = '4.22.4'
  end
data/lib/dor/workflow/document.rb CHANGED
@@ -39,20 +39,20 @@ module Workflow

  def definition
  @definition ||= begin
- if @@definitions.has_key? self.workflowId.first
- @@definitions[self.workflowId.first]
+ if @@definitions.has_key? workflowId.first
+ @@definitions[workflowId.first]
  else
- wfo = Dor::WorkflowObject.find_by_name(self.workflowId.first)
+ wfo = Dor::WorkflowObject.find_by_name(workflowId.first)
  wf_def=wfo ? wfo.definition : nil
- @@definitions[self.workflowId.first] = wf_def
+ @@definitions[workflowId.first] = wf_def
  wf_def
  end
  end
  end

- def graph(parent=nil, dir=nil)
- wf_definition = self.definition
- result = wf_definition ? Workflow::Graph.from_processes(wf_definition.repo, wf_definition.name, self.processes, parent) : nil
+ def graph(parent = nil, dir = nil)
+ wf_definition = definition
+ result = wf_definition ? Workflow::Graph.from_processes(wf_definition.repo, wf_definition.name, processes, parent) : nil
  unless result.nil?
  result['rankdir'] = dir || 'TB'
  end
@@ -60,7 +60,7 @@ module Workflow
  end

  def [](value)
- self.processes.find { |p| p.name == value }
+ processes.find { |p| p.name == value }
  end

  def processes
@@ -69,15 +69,15 @@ module Workflow
  return []
  end
  @processes ||=
- if self.definition
- self.definition.processes.collect do |process|
+ if definition
+ definition.processes.collect do |process|
  node = ng_xml.at("/workflow/process[@name = '#{process.name}']")
  process.update!(node,self) unless node.nil?
  process
  end
  else
- self.find_by_terms(:workflow, :process).collect do |x|
- pnode = Dor::Workflow::Process.new(self.repository, self.workflowId, {})
+ find_by_terms(:workflow, :process).collect do |x|
+ pnode = Dor::Workflow::Process.new(repository, workflowId, {})
  pnode.update!(x,self)
  pnode
  end.sort_by(&:datetime)
@@ -85,12 +85,12 @@ module Workflow
  end

  def workflow_should_show_completed? processes
- return processes.all?{|p| ['skipped', 'completed', '', nil].include?(p.status)}
+ processes.all?{|p| ['skipped', 'completed', '', nil].include?(p.status)}
  end

- def to_solr(solr_doc=Hash.new, *args)
- wf_name = self.workflowId.first
- repo = self.repository.first
+ def to_solr(solr_doc = Hash.new, *args)
+ wf_name = workflowId.first
+ repo = repository.first
  add_solr_value(solr_doc, 'wf', wf_name, :string, [:facetable])
  add_solr_value(solr_doc, 'wf_wps', wf_name, :string, [:facetable])
  add_solr_value(solr_doc, 'wf_wsp', wf_name, :string, [:facetable])
@@ -101,7 +101,7 @@ module Workflow
  processes.each do |process|
  if process.status.present?
  #add a record of the robot having operated on this item, so we can track robot activity
- if process.date_time and process.status and (process.status == 'completed' || process.status == 'error')
+ if process.date_time && process.status && (process.status == 'completed' || process.status == 'error')
  add_solr_value(solr_doc, "wf_#{wf_name}_#{process.name}", process.date_time+'Z', :date)
  end
  add_solr_value(solr_doc, 'wf_error', "#{wf_name}:#{process.name}:#{process.error_message}", :string, [:facetable,:displayable]) if process.error_message #index the error message without the druid so we hopefully get some overlap
@@ -131,8 +131,8 @@ module Workflow
  end

  def inspect
- "#<#{self.class.name}:#{self.object_id}>"
+ "#<#{self.class.name}:#{object_id}>"
  end
  end
  end
- end
+ end
data/lib/dor/workflow/graph.rb CHANGED
@@ -3,16 +3,16 @@ require 'graphviz'
  module Dor
  module Workflow
  class Graph
-
+
  FILL_COLORS = { 'waiting' => "white", 'ready' => "white", 'error' => "#8B0000", 'blocked' => "white", 'completed' => "darkgreen", 'unknown' => "#CFCFCF" }
  TEXT_COLORS = { 'waiting' => "black", 'ready' => "black", 'error' => "white", 'blocked' => "#8B0000", 'completed' => "white", 'unknown' => "black" }
  PATTERNS = { 'waiting' => "diagonals", 'ready' => "filled", 'error' => "filled", 'blocked' => "diagonals", 'completed' => "filled", 'unknown' => "filled" }
  RESERVED_KEYS = ['repository','name']

  attr_reader :repo, :name, :processes, :graph, :root
-
+
  def self.from_config(name, config, parent = nil)
- wf = self.new(config['repository'], name, parent)
+ wf = new(config['repository'], name, parent)
  config.keys.each { |p| wf.add_process(p.to_s) unless RESERVED_KEYS.include?(p) }
  config.keys.each { |p|
  if wf.processes[p]
@@ -27,12 +27,12 @@ class Graph
  end
  }
  wf.finish
- return wf
+ wf
  end
-
+
  def self.from_processes(repo, name, processes, parent = nil)
- wf = self.new(repo, name, parent)
- processes.each { |p|
+ wf = new(repo, name, parent)
+ processes.each { |p|
  wf.add_process(p.name).status = p.state || 'unknown'
  }
  processes.each { |p|
@@ -46,15 +46,15 @@ class Graph
  }
  }
  wf.finish
- return wf
+ wf
  end
-
+
  def initialize(repo, name, parent = nil)
  @repo = repo
  @name = name
  if parent.nil?
  @graph = GraphViz.new(qname)
- @root = self.add_nodes(name)
+ @root = add_nodes(name)
  else
  @graph = parent.subgraph(qname)
  @root = parent.add_nodes(name)
@@ -63,34 +63,34 @@ class Graph
  @root.shape = 'plaintext'
  @processes = {}
  end
-
+
  def qname
  [@repo,@name].join(':')
  end
-
+
  def add_process(name, external = false)
  pqname = name.split(/:/).length == 3 ? name : [qname,name].join(':')
  p = Process.new(self, pqname, name)
  @processes[name] = p
- return p
+ p
  end
-
+
  def finish
  @processes.values.each do |process|
  process.node.fontname = 'Helvetica'
- if process.id =~ %r{^#{qname}} and process.prerequisites.length == 0
+ if process.id =~ %r{^#{qname}} && process.prerequisites.length == 0
  (@root << process.node)[:arrowhead => 'none', :arrowtail => 'none', :dir => 'both', :style => 'invisible']
  end
  end

  @root.fontname = 'Helvetica'
- return self
+ self
  end
-
+
  def inspect
- "#{self.to_s[0..-2]} #{repo}:#{name} (#{processes.keys.join(', ')})>"
+ "#{to_s[0..-2]} #{repo}:#{name} (#{processes.keys.join(', ')})>"
  end
-
+
  def method_missing(sym,*args)
  if @graph.respond_to?(sym)
  @graph.send(sym,*args)
@@ -98,11 +98,11 @@ class Graph
  super
  end
  end
-
+
  class Process
-
+
  attr_reader :name, :status, :node, :prerequisites
-
+
  def initialize(graph, id, name)
  @name = name
  @graph = graph
@@ -110,13 +110,13 @@ class Graph
  @node.shape = 'box'
  @node.label = name
  @prerequisites = []
- self.set_status('unknown')
+ set_status('unknown')
  end
-
+
  def id
  @node.id
  end
-
+
  def status=(s)
  @status = s
  if s == 'external'
@@ -129,14 +129,14 @@ class Graph
  @node.style = PATTERNS[s]
  end
  end
-
+
  def set_status(s)
  self.status = s
- return self
+ self
  end
-
+
  def depends_on(*processes)
- wf1 = self.id.split(/:/)[0..1].join(':')
+ wf1 = id.split(/:/)[0..1].join(':')
  processes.each { |process|
  wf2 = process.id.split(/:/)[0..1].join(':')
  edge = (process.node << @node)
@@ -146,21 +146,21 @@ class Graph
  if (wf1 != wf2)
  edge.style = 'dashed'
  end
- self.prerequisites << process
+ prerequisites << process
  }
- return self
+ self
  end
-
+
  def same_as(process)
- @node = process.node
+ @node = process.node
  end
-
+
  def all_prerequisites
  prerequisites.collect { |p| p.all_prerequisites + [p.name] }.flatten.uniq
  end
-
+
  end

  end
  end
- end
+ end