sufia-models 4.3.1 → 5.0.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. checksums.yaml +4 -4
  2. data/app/actors/sufia/generic_file/actor.rb +7 -10
  3. data/app/jobs/active_fedora_pid_based_job.rb +2 -3
  4. data/app/jobs/audit_job.rb +28 -31
  5. data/app/jobs/batch_update_job.rb +9 -8
  6. data/app/jobs/import_url_job.rb +2 -2
  7. data/app/models/batch.rb +11 -12
  8. data/app/models/checksum_audit_log.rb +7 -8
  9. data/app/models/concerns/sufia/ability.rb +4 -6
  10. data/app/models/concerns/sufia/collection.rb +4 -5
  11. data/app/models/concerns/sufia/file_stat_utils.rb +3 -3
  12. data/app/models/concerns/sufia/generic_file.rb +16 -14
  13. data/app/models/concerns/sufia/generic_file/audit.rb +50 -31
  14. data/app/models/concerns/sufia/generic_file/characterization.rb +3 -3
  15. data/app/models/concerns/sufia/generic_file/derivatives.rb +5 -5
  16. data/app/models/concerns/sufia/generic_file/full_text_indexing.rb +2 -2
  17. data/app/models/concerns/sufia/generic_file/metadata.rb +82 -11
  18. data/app/models/concerns/sufia/generic_file/proxy_deposit.rb +12 -3
  19. data/app/models/concerns/sufia/generic_file/versions.rb +1 -4
  20. data/app/models/concerns/sufia/generic_file/web_form.rb +13 -6
  21. data/app/models/concerns/sufia/model_methods.rb +11 -9
  22. data/app/models/concerns/sufia/user.rb +11 -28
  23. data/app/models/datastreams/file_content_datastream.rb +1 -1
  24. data/app/models/datastreams/fits_datastream.rb +1 -1
  25. data/app/models/file_download_stat.rb +2 -2
  26. data/app/models/file_usage.rb +5 -9
  27. data/app/models/file_view_stat.rb +2 -2
  28. data/app/models/local_authority.rb +2 -2
  29. data/app/models/proxy_deposit_request.rb +1 -1
  30. data/app/services/sufia/id_service.rb +5 -5
  31. data/app/services/sufia/noid.rb +10 -7
  32. data/lib/generators/sufia/models/cached_stats_generator.rb +31 -2
  33. data/lib/generators/sufia/models/install_generator.rb +31 -11
  34. data/lib/generators/sufia/models/proxies_generator.rb +31 -2
  35. data/lib/generators/sufia/models/templates/config/sufia.rb +10 -3
  36. data/lib/generators/sufia/models/upgrade400_generator.rb +33 -2
  37. data/lib/sufia/models/engine.rb +13 -4
  38. data/lib/sufia/models/file_content/versions.rb +12 -8
  39. data/lib/sufia/models/version.rb +1 -1
  40. data/lib/sufia/permissions/writable.rb +34 -16
  41. data/sufia-models.gemspec +4 -2
  42. metadata +91 -79
  43. data/app/models/concerns/sufia/generic_file/reload_on_save.rb +0 -18
  44. data/app/models/concerns/sufia/properties_datastream_behavior.rb +0 -32
  45. data/app/models/concerns/sufia/user_usage_stats.rb +0 -15
  46. data/app/models/datastreams/batch_rdf_datastream.rb +0 -6
  47. data/app/models/datastreams/generic_file_rdf_datastream.rb +0 -69
  48. data/app/models/datastreams/paranoid_rights_datastream.rb +0 -22
  49. data/app/models/datastreams/properties_datastream.rb +0 -4
  50. data/app/models/sufia/orcid_validator.rb +0 -8
  51. data/app/models/user_stat.rb +0 -2
  52. data/lib/generators/sufia/models/abstract_migration_generator.rb +0 -30
  53. data/lib/generators/sufia/models/orcid_field_generator.rb +0 -19
  54. data/lib/generators/sufia/models/templates/migrations/add_orcid_to_users.rb +0 -5
  55. data/lib/generators/sufia/models/templates/migrations/create_user_stats.rb +0 -19
  56. data/lib/generators/sufia/models/user_stats_generator.rb +0 -31
  57. data/lib/sufia/models/stats/user_stat_importer.rb +0 -85
  58. data/lib/tasks/stats_tasks.rake +0 -12
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: c4b6c01683739e17c53f491882eb8c50ead27577
- data.tar.gz: 2b780079c2b52e9f076df2e50e61835bc1f3ab2a
+ metadata.gz: d1a8c6e6170dab340eb52e7102f29c63de547850
+ data.tar.gz: 68b0a675fb176f9132a01f791ec3ea7302acd716
  SHA512:
- metadata.gz: 8fdfc3fdd81834097e68d9180cd860edb2f9813226532dd6fe06d643dddce743513556fb1a5f1af7e6bae93a662d6438ab0a6ef25d7521569127c4fcb321a489
- data.tar.gz: 3249a331ab148fad1979b43b27fee9bf56ef1299061fde4f4ac0489effb9bee1a4d8357464de85350b2b78996d5f9e584565ffca94da43628b44405f895b3c7a
+ metadata.gz: e90c6e067dae4e9aa8c10bbf7d914e4cc28b18355822244cf46be1622d2c7068aba56730e004bea8112fa4083fe77d66635b45e9de8f84c1ce765695db1f15a3
+ data.tar.gz: c2cf9171d0cfaa461c0afc944e250bf9adf0b240f10be8d36d840b48429b73c823d9d170f8514453d9b66b3eef6d820e449ea019bd0bf6b1ff3ce27521a38b7c
data/app/actors/sufia/generic_file/actor.rb CHANGED
@@ -20,7 +20,7 @@ module Sufia::GenericFile
  generic_file.creator = [user.name]

  if batch_id
- generic_file.batch_id = Sufia::Noid.namespaceize(batch_id)
+ generic_file.batch_id = batch_id
  else
  ActiveFedora::Base.logger.warn "unable to find batch to attach to"
  end
@@ -28,8 +28,7 @@ module Sufia::GenericFile
  end

  def create_content(file, file_name, dsid)
- fname = generic_file.label.blank? ? file_name.truncate(255) : generic_file.label
- generic_file.add_file(file, dsid, fname)
+ generic_file.add_file(file, dsid, file_name.truncate(255))
  save_characterize_and_record_committer do
  if Sufia.config.respond_to?(:after_create_content)
  Sufia.config.after_create_content.call(generic_file, user)
@@ -37,9 +36,8 @@ module Sufia::GenericFile
  end
  end

- def revert_content(revision_id, datastream_id)
- revision = generic_file.content.get_version(revision_id)
- generic_file.add_file(revision.content, datastream_id, revision.label)
+ def revert_content(revision_id)
+ generic_file.content.restore_version(revision_id)
  save_characterize_and_record_committer do
  if Sufia.config.respond_to?(:after_revert_content)
  Sufia.config.after_revert_content.call(generic_file, user, revision_id)
@@ -69,11 +67,10 @@ module Sufia::GenericFile
  end

  def destroy
- pid = generic_file.pid #Work around for https://github.com/projecthydra/active_fedora/issues/422
  generic_file.destroy
- FeaturedWork.where(generic_file_id: pid).destroy_all
+ FeaturedWork.where(generic_file_id: generic_file.id).destroy_all
  if Sufia.config.respond_to?(:after_destroy)
- Sufia.config.after_destroy.call(pid, user)
+ Sufia.config.after_destroy.call(generic_file.id, user)
  end
  end

@@ -103,7 +100,7 @@ module Sufia::GenericFile
  end

  def push_characterize_job
- Sufia.queue.push(CharacterizeJob.new(@generic_file.pid))
+ Sufia.queue.push(CharacterizeJob.new(@generic_file.id))
  end

  class << self
data/app/jobs/active_fedora_pid_based_job.rb CHANGED
@@ -10,10 +10,9 @@ class ActiveFedoraPidBasedJob
  end

  def object
- @object ||= ActiveFedora::Base.find(pid, cast: true).tap do |f|
- f.reload_on_save = true
- end
+ @object ||= ActiveFedora::Base.find(pid)
  end
+
  alias_method :generic_file, :object
  alias_method :generic_file_id, :pid

data/app/jobs/audit_job.rb CHANGED
@@ -6,44 +6,41 @@ class AuditJob < ActiveFedoraPidBasedJob
  PASS = 'Passing Audit Run'
  FAIL = 'Failing Audit Run'

- attr_accessor :pid, :datastream_id, :version_id
+ attr_accessor :uri, :pid, :path

- def initialize(pid, datastream_id, version_id)
- super(pid)
- self.datastream_id = datastream_id
- self.version_id = version_id
+ # URI of the resource to audit.
+ # This URI could include the actual resource (e.g. content) and the version to audit:
+ # http://localhost:8983/fedora/rest/test/a/b/c/abcxyz/content/fcr:versions/version1
+ # but it could also just be:
+ # http://localhost:8983/fedora/rest/test/a/b/c/abcxyz/content
+ def initialize(id, path, uri)
+ super(uri)
+ self.pid = id
+ self.path = path
+ self.uri = uri
  end

  def run
- if generic_file
- datastream = generic_file.datastreams[datastream_id]
- if datastream
- version = datastream.versions.select { |v| v.versionID == version_id}.first
- log = run_audit(version)
-
- # look up the user for sending the message to
- login = generic_file.depositor
- if login
- user = User.find_by_user_key(login)
- ActiveFedora::Base.logger.warn "User '#{login}' not found" unless user
- job_user = User.audituser()
- # send the user a message about the failing audit
- unless (log.pass == 1)
- message = "The audit run at #{log.created_at} for #{log.pid}:#{log.dsid}:#{log.version} was #{log.pass == 1 ? 'passing' : 'failing'}."
- subject = (log.pass == 1 ? PASS : FAIL)
- job_user.send_message(user, message, subject)
- end
- end
- else
- ActiveFedora::Base.logger.warn "No datastream for audit!!!!! pid: #{pid} dsid: #{datastream_id}"
- end
- else
- ActiveFedora::Base.logger.warn "No generic file for data stream audit!!!!! pid: #{pid} dsid: #{datastream_id}"
+ fixity_ok = false
+ log = run_audit(pid, path, uri)
+ fixity_ok = (log.pass == 1)
+ unless fixity_ok
+ # send the user a message about the failing audit
+ login = generic_file.depositor
+ user = User.find_by_user_key(login)
+ ActiveFedora::Base.logger.warn "User '#{login}' not found" unless user
+ job_user = User.audituser()
+ file_title = generic_file.title.first
+ message = "The audit run at #{log.created_at} for #{file_title} (#{uri}) failed."
+ subject = FAIL
+ job_user.send_message(user, message, subject)
  end
+ fixity_ok
  end

  private
- def run_audit(version)
- object.class.run_audit(version)
+ def run_audit(id, path, uri)
+ object.class.run_audit(id, path, uri)
  end
+
  end
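
For context, a minimal sketch of how the new AuditJob signature would be invoked (not from the package itself; the id 'abcxyz' is a placeholder):

    gf = GenericFile.find('abcxyz')                       # placeholder id
    # 'content' is the attached-file path; the URI may point at the file itself
    # or at a specific fcr:versions resource, as the comment in the diff shows.
    Sufia.queue.push(AuditJob.new(gf.id, 'content', gf.content.uri))

This mirrors the AuditJob.new(id, file, uri) call added in sufia/generic_file/audit.rb below.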
data/app/jobs/batch_update_job.rb CHANGED
@@ -10,7 +10,7 @@ class BatchUpdateJob

  def initialize(login, params)
  self.login = login
- self.title = params[:title]
+ self.title = params[:title] || {}
  self.file_attributes = params[:generic_file]
  self.visibility = params[:visibility]
  self.batch_id = params[:id]
@@ -25,7 +25,9 @@ class BatchUpdateJob
  batch.generic_files.each do |gf|
  update_file(gf, user)
  end
- batch.update_attributes({status:["Complete"]})
+
+ batch.update(status: ["Complete"])
+
  if denied.empty?
  send_user_success_message(user, batch) unless saved.empty?
  else
@@ -35,12 +37,12 @@ class BatchUpdateJob

  def update_file(gf, user)
  unless user.can? :edit, gf
- ActiveFedora::Base.logger.error "User #{user.user_key} DENIED access to #{gf.pid}!"
+ ActiveFedora::Base.logger.error "User #{user.user_key} DENIED access to #{gf.id}!"
  denied << gf
  return
  end
- gf.title = title[gf.pid] if title[gf.pid] rescue gf.label
- gf.attributes=file_attributes
+ gf.title = title[gf.id] if title[gf.id]
+ gf.attributes = file_attributes
  gf.visibility= visibility

  save_tries = 0
@@ -48,13 +50,13 @@ class BatchUpdateJob
  gf.save!
  rescue RSolr::Error::Http => error
  save_tries += 1
- ActiveFedora::Base.logger.warn "BatchUpdateJob caught RSOLR error on #{gf.pid}: #{error.inspect}"
+ ActiveFedora::Base.logger.warn "BatchUpdateJob caught RSOLR error on #{gf.id}: #{error.inspect}"
  # fail for good if the tries is greater than 3
  raise error if save_tries >=3
  sleep 0.01
  retry
  end #
- Sufia.queue.push(ContentUpdateEventJob.new(gf.pid, login))
+ Sufia.queue.push(ContentUpdateEventJob.new(gf.id, login))
  saved << gf
  end

@@ -67,5 +69,4 @@ class BatchUpdateJob
  message = denied.count > 1 ? multiple_failure(batch.noid, denied) : single_failure(batch.noid, denied.first)
  User.batchuser.send_message(user, message, failure_subject, sanitize_text = false)
  end
-
  end
data/app/jobs/import_url_job.rb CHANGED
@@ -11,12 +11,12 @@ class ImportUrlJob < ActiveFedoraPidBasedJob
  def run
  user = User.find_by_user_key(generic_file.depositor)

- Tempfile.open(self.pid) do |f|
+ Tempfile.open(pid.gsub('/', '_')) do |f|
  path = copy_remote_file(generic_file.import_url, f)
  # attach downloaded file to generic file stubbed out
  if Sufia::GenericFile::Actor.new(generic_file, user).create_content(f, path, 'content')
  # add message to user for downloaded file
- message = "The file (#{generic_file.content.label}) was successfully imported."
+ message = "The file (#{generic_file.label}) was successfully imported."
  job_user.send_message(user, message, 'File Import')
  else
  job_user.send_message(user, generic_file.errors.full_messages.join(', '), 'File Import Error')
data/app/models/batch.rb CHANGED
@@ -3,24 +3,23 @@ class Batch < ActiveFedora::Base
  include Sufia::ModelMethods
  include Sufia::Noid

- has_metadata name: "descMetadata", type: BatchRdfDatastream
+ has_many :generic_files, predicate: ActiveFedora::RDF::Fcrepo::RelsExt.isPartOf

- belongs_to :user, property: "creator"
- has_many :generic_files, property: :is_part_of
+ property :creator, predicate: ::RDF::DC.creator
+ property :title, predicate: ::RDF::DC.title
+ property :status, predicate: ::RDF::DC.type

- has_attributes :title, :creator, :part, :status, datastream: :descMetadata, multiple: true
-
- def self.find_or_create(pid)
+ def self.find_or_create(id)
  begin
- Batch.find(pid)
+ Batch.find(id)
  rescue ActiveFedora::ObjectNotFoundError
- Batch.create({pid: pid})
+ Batch.create(id: id)
  end
  end

- def to_solr(solr_doc={}, opts={})
- solr_doc = super(solr_doc, opts)
- solr_doc[Solrizer.solr_name('noid', Sufia::GenericFile.noid_indexer)] = noid
- return solr_doc
+ def to_solr(solr_doc={})
+ super.tap do |solr_doc|
+ solr_doc[Solrizer.solr_name('noid', Sufia::GenericFile.noid_indexer)] = noid
+ end
  end
  end
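
A minimal sketch of the effect of this change (not from the package): with the descMetadata datastream gone, Batch attributes are plain RDF properties set directly on the object, which is how BatchUpdateJob now uses them; 'abc123' is a placeholder id.

    batch = Batch.find_or_create('abc123')   # placeholder id
    batch.update(status: ['Complete'])       # formerly update_attributes on descMetadata-backed attributes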
data/app/models/checksum_audit_log.rb CHANGED
@@ -1,16 +1,15 @@
  class ChecksumAuditLog < ActiveRecord::Base

- def ChecksumAuditLog.get_audit_log(version)
- ChecksumAuditLog.find_or_create_by_pid_and_dsid_and_version(pid: version.pid,
- dsid: version.dsid,
- version: version.versionID)
+ def ChecksumAuditLog.get_audit_log(id, path, version_uri)
+ ChecksumAuditLog.find_or_create_by(pid: id, dsid: path, version: version_uri)
  end

- def ChecksumAuditLog.prune_history(version)
- ## Check to see if there are previous passing logs that we can delete
- # we want to keep the first passing event after a failure, the most current passing event, and all failures so that this table doesn't grow too large
+ def ChecksumAuditLog.prune_history(id, path)
+ # Check to see if there are previous passing logs that we can delete
+ # we want to keep the first passing event after a failure, the most current passing event,
+ # and all failures so that this table doesn't grow too large
  # Simple way (a little naieve): if the last 2 were passing, delete the first one
- logs = GenericFile.load_instance_from_solr(version.pid).logs(version.dsid)
+ logs = GenericFile.load_instance_from_solr(id).logs(path)
  list = logs.limit(2)
  if list.size > 1 && (list[0].pass == 1) && (list[1].pass == 1)
  list[0].destroy
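
A sketch of the reworked lookup (not from the package): the log columns keep their old names, but now hold the object id (pid), the attached-file path (dsid), and the version URI (version); gf stands in for a GenericFile instance.

    ChecksumAuditLog.get_audit_log(
      gf.id,                                       # was version.pid
      'content',                                   # was version.dsid
      "#{gf.content.uri}/fcr:versions/version1"    # was version.versionID; URI shape per the AuditJob comment
    )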
data/app/models/concerns/sufia/ability.rb CHANGED
@@ -15,8 +15,8 @@ module Sufia
  end

  def proxy_deposit_abilities
- can :transfer, String do |pid|
- get_depositor_from_pid(pid) == current_user.user_key
+ can :transfer, String do |id|
+ depositor_for_document(id) == current_user.user_key
  end
  can :create, ProxyDepositRequest if user_groups.include? 'registered'
  can :accept, ProxyDepositRequest, receiving_user_id: current_user.id, status: 'pending'
@@ -47,10 +47,8 @@ module Sufia

  private

- def get_depositor_from_pid(pid)
- ::GenericFile.load_instance_from_solr(pid).depositor
- rescue
- nil
+ def depositor_for_document(document_id)
+ ::GenericFile.load_instance_from_solr(document_id).depositor
  end
  end
  end
data/app/models/concerns/sufia/collection.rb CHANGED
@@ -11,7 +11,6 @@ module Sufia
  before_save :update_permissions
  validates :title, presence: true

- has_metadata "properties", type: PropertiesDatastream
  end

  def terms_for_display
@@ -35,10 +34,10 @@ module Sufia
  noid
  end

- def to_solr(solr_doc={}, opts={})
- super(solr_doc, opts)
- solr_doc[Solrizer.solr_name("noid", Sufia::GenericFile.noid_indexer)] = noid
- return solr_doc
+ def to_solr(solr_doc={})
+ super.tap do |solr_doc|
+ solr_doc[Solrizer.solr_name("noid", Sufia::GenericFile.noid_indexer)] = noid
+ end
  end

  def update_permissions
data/app/models/concerns/sufia/file_stat_utils.rb CHANGED
@@ -17,13 +17,13 @@ module Sufia
  {ga_start_date: ga_start_date, cached_stats: stats.to_a }
  end

- def combined_stats file_id, start_date, object_method, ga_key, user_id=nil
+ def combined_stats file_id, start_date, object_method, ga_key
  stat_cache_info = cached_stats( file_id, start_date, object_method)
  stats = stat_cache_info[:cached_stats]
  if stat_cache_info[:ga_start_date] < Date.today
  ga_stats = ga_statistics(stat_cache_info[:ga_start_date], file_id)
  ga_stats.each do |stat|
- lstat = self.new file_id: file_id, date: stat[:date], object_method => stat[ga_key], user_id: user_id
+ lstat = self.new file_id:file_id, date: stat[:date], object_method => stat[ga_key]
  lstat.save unless Date.parse(stat[:date]) == Date.today
  stats << lstat
  end
@@ -32,4 +32,4 @@ module Sufia
  end

  end
- end
+ end
data/app/models/concerns/sufia/generic_file.rb CHANGED
@@ -17,17 +17,16 @@ module Sufia
  include Sufia::GenericFile::Metadata
  include Sufia::GenericFile::Versions
  include Sufia::GenericFile::VirusCheck
- include Sufia::GenericFile::ReloadOnSave
  include Sufia::GenericFile::FullTextIndexing
  include Sufia::GenericFile::ProxyDeposit
  include Hydra::Collections::Collectible

  included do
- belongs_to :batch, property: :is_part_of
+ belongs_to :batch, predicate: ActiveFedora::RDF::Fcrepo::RelsExt.isPartOf

- around_save :retry_warming
+ # around_save :retry_warming

- attr_accessible *(terms_for_display + [:part_of, :permissions])
+ attr_accessible *(terms_for_display + [:part_of, :permissions_attributes])
  end

  def persistent_url
@@ -37,6 +36,7 @@ module Sufia
  def retry_warming
  save_tries = 0
  conflict_tries = 0
+ etag_tries = 0
  begin
  yield
  rescue RSolr::Error::Http => error
@@ -46,6 +46,15 @@ module Sufia
  raise if save_tries >=3
  sleep 0.01
  retry
+ rescue Ldp::EtagMismatch
+ prev_changes = changes.dup
+ # There was a version conflict, so reload the previous version, then apply the changed attributes
+ reload
+ prev_changes.each do |key, (_, value)|
+ self[key] = value
+ end
+ raise if etag_tries >= 1
+ retry
  rescue ActiveResource::ResourceConflict => error
  conflict_tries += 1
  logger.warn "Retry caught Active Resource Conflict #{self.pid}: #{error.inspect}"
@@ -81,21 +90,14 @@ module Sufia
  @noid_indexer ||= Solrizer::Descriptor.new(:text, :indexed, :stored)
  end

- def to_solr(solr_doc={}, opts={})
- super(solr_doc, opts).tap do |solr_doc|
+ def to_solr(solr_doc={})
+ super.tap do |solr_doc|
  solr_doc[Solrizer.solr_name('label')] = self.label
  solr_doc[Solrizer.solr_name('noid', Sufia::GenericFile.noid_indexer)] = noid
  solr_doc[Solrizer.solr_name('file_format')] = file_format
  solr_doc[Solrizer.solr_name('file_format', :facetable)] = file_format
  solr_doc['all_text_timv'] = full_text.content
- solr_doc = index_collection_pids(solr_doc)
- end
- end
-
- def label=(new_label)
- @inner_object.label = new_label
- if self.title.empty?
- self.title = [new_label].compact
+ solr_doc = index_collection_ids(solr_doc)
  end
  end

data/app/models/concerns/sufia/generic_file/audit.rb CHANGED
@@ -5,25 +5,43 @@ module Sufia

  NO_RUNS = 999

- def audit(force = false)
- logs = []
- self.per_version do |ver|
- logs << audit_each(ver, force)
+ # provides a human readable version of the audit status
+ def human_readable_audit_status
+ stat = audit_stat(false)
+ case stat
+ when 0
+ 'failing'
+ when 1
+ 'passing'
+ else
+ stat
  end
- logs
  end

- def per_version(&block)
- self.datastreams.each do |dsid, ds|
- next if ds == full_text
- ds.versions.each do |ver|
- block.call(ver)
- end
+ # TODO: Run audits on all attached files. We're only audting "content" at tht moment
+ def audit force = false
+ @audit_log ||= Array.new
+ @force = force
+ audit_content
+ return @audit_log
+ end
+
+ def audit_content
+ if content.has_versions?
+ audit_file_versions("content")
+ else
+ @audit_log << audit_file("content", content.uri)
+ end
+ end
+
+ def audit_file_versions file
+ attached_files[file].versions.all.each do |version|
+ @audit_log << audit_file(file, version.uri, version.label)
  end
  end

- def logs(dsid)
- ChecksumAuditLog.where(dsid: dsid, pid: self.pid).order('created_at desc, id desc')
+ def logs(file)
+ ChecksumAuditLog.where(pid: self.id, dsid: file).order('created_at desc, id desc')
  end

  def audit!
@@ -51,16 +69,11 @@ module Sufia
  end
  end

- def audit_each(version, force = false)
- latest_audit = logs(version.dsid).first
- return latest_audit unless force || ::GenericFile.needs_audit?(version, latest_audit)
-
- # Resque.enqueue(AuditJob, version.pid, version.dsid, version.versionID)
- Sufia.queue.push(AuditJob.new(version.pid, version.dsid, version.versionID))
-
- # run the find just incase the job has finished already
- latest_audit = logs(version.dsid).first
- latest_audit = ChecksumAuditLog.new(pass: NO_RUNS, pid: version.pid, dsid: version.dsid, version: version.versionID) unless latest_audit
+ def audit_file(file, uri, label = nil)
+ latest_audit = logs(file).first
+ return latest_audit unless @force || ::GenericFile.needs_audit?(uri, latest_audit)
+ Sufia.queue.push(AuditJob.new(id, file, uri))
+ latest_audit ||= ChecksumAuditLog.new(pass: NO_RUNS, pid: id, dsid: file, version: label)
  latest_audit
  end

@@ -70,8 +83,9 @@ module Sufia
  ::GenericFile.audit(version, true)
  end

- def audit(version, force = false)
- latest_audit = self.find(version.pid).audit_each( version, force)
+ def audit(version_uri, force = false)
+ return { pass: true } # TODO Just skipping the audit for now
+ latest_audit = self.find(version_uri).audit_each( version, force)
  end

  def needs_audit?(version, latest_audit)
@@ -98,16 +112,21 @@ module Sufia
  ::GenericFile.audit_everything(true)
  end

- def run_audit(version)
- if version.dsChecksumValid
+ def run_audit(id, path, uri)
+ begin
+ fixity_ok = ActiveFedora::FixityService.new(uri).check
+ rescue Ldp::NotFound
+ error_msg = "resource not found"
+ end
+
+ if fixity_ok
  passing = 1
- ChecksumAuditLog.prune_history(version)
+ ChecksumAuditLog.prune_history(id, path)
  else
- logger.warn "***AUDIT*** Audit failed for #{version.pid} #{version.versionID}"
+ logger.warn "***AUDIT*** Audit failed for #{uri} #{error_msg}"
  passing = 0
  end
- check = ChecksumAuditLog.create!(pass: passing, pid: version.pid,
- dsid: version.dsid, version: version.versionID)
+ check = ChecksumAuditLog.create!(pass: passing, pid: id, version: uri, dsid: path)
  check
  end
  end
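
Taken together, a minimal sketch of the reworked audit flow (not from the package; 'abcxyz' is a placeholder id):

    gf = GenericFile.find('abcxyz')
    gf.audit                         # enqueues AuditJob.new(id, 'content', uri) per version (or once if unversioned)
    gf.human_readable_audit_status   # 'failing', 'passing', or the raw stat value
    # Inside the job, GenericFile.run_audit(id, path, uri) checks fixity via
    # ActiveFedora::FixityService.new(uri).check and writes a ChecksumAuditLog row.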