bulkrax 4.2.1 → 4.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/app/assets/javascripts/bulkrax/exporters.js +12 -0
- data/app/assets/javascripts/bulkrax/importers.js.erb +27 -1
- data/app/controllers/bulkrax/exporters_controller.rb +3 -1
- data/app/controllers/bulkrax/importers_controller.rb +1 -1
- data/app/factories/bulkrax/object_factory.rb +35 -8
- data/app/jobs/bulkrax/create_relationships_job.rb +1 -1
- data/app/jobs/bulkrax/import_work_job.rb +12 -10
- data/app/matchers/bulkrax/application_matcher.rb +1 -1
- data/app/models/bulkrax/csv_entry.rb +14 -10
- data/app/models/bulkrax/importer.rb +20 -15
- data/app/models/bulkrax/oai_entry.rb +1 -2
- data/app/models/concerns/bulkrax/file_set_entry_behavior.rb +8 -1
- data/app/models/concerns/bulkrax/import_behavior.rb +10 -9
- data/app/parsers/bulkrax/application_parser.rb +87 -14
- data/app/parsers/bulkrax/bagit_parser.rb +2 -1
- data/app/parsers/bulkrax/csv_parser.rb +11 -10
- data/app/parsers/bulkrax/oai_dc_parser.rb +2 -2
- data/app/services/bulkrax/remove_relationships_for_importer.rb +107 -0
- data/app/views/bulkrax/exporters/_form.html.erb +3 -3
- data/app/views/bulkrax/exporters/show.html.erb +17 -41
- data/app/views/bulkrax/importers/edit.html.erb +1 -1
- data/app/views/bulkrax/importers/new.html.erb +1 -1
- data/app/views/bulkrax/importers/show.html.erb +3 -114
- data/app/views/bulkrax/shared/_collection_entries_tab.html.erb +39 -0
- data/app/views/bulkrax/shared/_file_set_entries_tab.html.erb +39 -0
- data/app/views/bulkrax/shared/_work_entries_tab.html.erb +39 -0
- data/app/views/hyrax/dashboard/sidebar/_bulkrax_sidebar_additions.html.erb +7 -5
- data/app/views/hyrax/dashboard/sidebar/_repository_content.html.erb +23 -15
- data/db/migrate/20211203195233_rename_children_counters_to_relationships.rb +1 -1
- data/db/migrate/20211220195027_add_file_set_counters_to_importer_runs.rb +1 -1
- data/db/migrate/20220118001339_add_import_attempts_to_entries.rb +1 -1
- data/db/migrate/20220119213325_add_work_counters_to_importer_runs.rb +1 -1
- data/db/migrate/20220301001839_create_bulkrax_pending_relationships.rb +1 -1
- data/db/migrate/20220303212810_add_order_to_bulkrax_pending_relationships.rb +1 -1
- data/db/migrate/20220412233954_add_include_thumbnails_to_bulkrax_exporters.rb +1 -1
- data/db/migrate/20220413180915_add_generated_metadata_to_bulkrax_exporters.rb +1 -1
- data/db/migrate/20220609001128_rename_bulkrax_importer_run_to_importer_run.rb +1 -1
- data/lib/bulkrax/version.rb +1 -1
- data/lib/bulkrax.rb +38 -11
- data/lib/generators/bulkrax/templates/config/initializers/bulkrax.rb +10 -0
- data/lib/tasks/bulkrax_tasks.rake +10 -11
- data/lib/tasks/reset.rake +65 -0
- metadata +7 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d53012e0252f8033f5da334d5336c134deb3a6221040cfe4c1c6c01bb473d617
+  data.tar.gz: e986a3506c073aa533c4ea303a123e8866d2bf7ac6d8ab6df3949682f61b6c05
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8ea20593d1164f62fdfda2dbe84bd8cc4e199a65d7e85eea0b68b27e71aeff7579f1b0059042f55eec1c7ab36f8dec9db406ad58cc3a315b29b8e4900f7cb450
+  data.tar.gz: 89a03cd842d855d48f8c4952b6e9a6daea757e80cb8c0f9ee370b59f90e434b39dfb548deb1757f36463914bd5cccbcf1df6c5ffd9102bb5f0d54b5b7ea6476f
data/app/assets/javascripts/bulkrax/exporters.js
CHANGED
@@ -1,10 +1,12 @@
 function hideUnhide(field) {
   var allSources = $('body').find('.export-source-option')
+  removeRequired(allSources)
   hide(allSources)
 
   if (field.length > 0) {
     var selectedSource = $('.' + field)
     unhideSelected(selectedSource)
+    addRequired(selectedSource)
   }
 
   if (field === 'collection') {
@@ -12,6 +14,16 @@ function hideUnhide(field) {
   }
 };
 
+function addRequired(selectedSource) {
+  selectedSource.addClass('required').attr('required', 'required');
+  selectedSource.parent().addClass('required');
+}
+
+function removeRequired(allSources) {
+  allSources.removeClass('required').removeAttr('required');
+  allSources.parent().removeClass('required').removeAttr('required')
+};
+
 // hide all export_source
 function hide(allSources) {
   allSources.addClass('hidden');
data/app/assets/javascripts/bulkrax/importers.js.erb
CHANGED
@@ -47,6 +47,7 @@ function prepBulkrax(event) {
   var config = { childList: true, attributes: true };
   var callback = function(mutationsList) {
     for(var mutation of mutationsList) {
+
       if (mutation.type == 'childList') {
         browseButton = document.getElementById('browse');
         var exp = /selected_files\[[0-9]*\]\[url\]/
@@ -119,10 +120,35 @@ function handleParserKlass() {
     $('.parser_fields').append(window[parser_klass.data('partial')])
   }
 
+  handleBrowseEverything()
   var file_path_value = $('#importer_parser_fields_import_file_path').val()
   handleFileToggle(file_path_value)
 }
 
+function handleBrowseEverything(){
+  var button = $("button[data-toggle='browse-everything']")
+  if(button.length == 0) { return; }
+  button.browseEverything({
+    route: button.data('route'),
+    target: button.data('target')
+  }).done(function(data) {
+    var evt = { isDefaultPrevented: function() { return false; } };
+    $('.ev-browser.show').removeClass('show')
+    if($('#fileupload').length > 0) {
+      var files = $.map(data, function(d) { return { name: d.file_name, size: d.file_size, id: d.url } });
+      $.blueimp.fileupload.prototype.options.done.call($('#fileupload').fileupload(), evt, { result: { files: files }});
+    }
+    return true
+    // User has submitted files; data contains an array of URLs and their options
+  }).cancel(function() {
+    $('.ev-browser.show').removeClass('show')
+    // User cancelled the browse operation
+  }).fail(function(status, error, text) {
+    $('.ev-browser.show').removeClass('show')
+    // URL retrieval experienced a technical failure
+  });
+}
+
 function handleSourceLoad(refresh_button, base_url, external_set_select) {
   if (base_url.val() == "") { // ignore empty base_url value
     return
@@ -163,4 +189,4 @@ function setError(selector, error) {
   selector.attr('disabled', true)
 }
 
-$(document).on({'ready': prepBulkrax, 'turbolinks:load': prepBulkrax})
+$(document).on({'ready': prepBulkrax, 'turbolinks:load': prepBulkrax})
data/app/controllers/bulkrax/exporters_controller.rb
CHANGED
@@ -22,7 +22,9 @@ module Bulkrax
       add_exporter_breadcrumbs
       add_breadcrumb @exporter.name
 
-      @work_entries = @exporter.entries.where(type: @exporter.parser.entry_class.to_s).page(params[:work_entries_page])
+      @work_entries = @exporter.entries.where(type: @exporter.parser.entry_class.to_s).page(params[:work_entries_page]).per(30)
+      @collection_entries = @exporter.entries.where(type: @exporter.parser.collection_entry_class.to_s).page(params[:collections_entries_page]).per(30)
+      @file_set_entries = @exporter.entries.where(type: @exporter.parser.file_set_entry_class.to_s).page(params[:file_set_entries_page]).per(30)
     end
 
     # GET /exporters/new
data/app/controllers/bulkrax/importers_controller.rb
CHANGED
@@ -276,7 +276,7 @@ module Bulkrax
     def setup_client(url)
       return false if url.nil?
       headers = { from: Bulkrax.server_name }
-      @client ||= OAI::Client.new(url, headers: headers, parser: 'libxml'
+      @client ||= OAI::Client.new(url, headers: headers, parser: 'libxml')
     end
 
     # Download methods
data/app/factories/bulkrax/object_factory.rb
CHANGED
@@ -159,28 +159,55 @@ module Bulkrax
       file_set_attrs = attrs.slice(*object.attributes.keys)
       object.assign_attributes(file_set_attrs)
 
-      attrs['uploaded_files']
+      attrs['uploaded_files']&.each do |uploaded_file_id|
         uploaded_file = ::Hyrax::UploadedFile.find(uploaded_file_id)
         next if uploaded_file.file_set_uri.present?
 
-
-
-
-
-        actor.create_content(uploaded_file)
-        actor.attach_to_work(work)
+        create_file_set_actor(attrs, work, work_permissions, uploaded_file)
+      end
+      attrs['remote_files']&.each do |remote_file|
+        create_file_set_actor(attrs, work, work_permissions, nil, remote_file)
       end
 
       object.save!
     end
 
+    def create_file_set_actor(attrs, work, work_permissions, uploaded_file, remote_file = nil)
+      actor = ::Hyrax::Actors::FileSetActor.new(object, @user)
+      uploaded_file&.update(file_set_uri: actor.file_set.uri)
+      actor.file_set.permissions_attributes = work_permissions
+      actor.create_metadata(attrs)
+      actor.create_content(uploaded_file) if uploaded_file
+      actor.attach_to_work(work, attrs)
+      handle_remote_file(remote_file: remote_file, actor: actor, update: false) if remote_file
+    end
+
     def update_file_set(attrs)
       file_set_attrs = attrs.slice(*object.attributes.keys)
       actor = ::Hyrax::Actors::FileSetActor.new(object, @user)
-
+      attrs['remote_files']&.each do |remote_file|
+        handle_remote_file(remote_file: remote_file, actor: actor, update: true)
+      end
       actor.update_metadata(file_set_attrs)
     end
 
+    def handle_remote_file(remote_file:, actor:, update: false)
+      actor.file_set.label = remote_file['file_name']
+      actor.file_set.import_url = remote_file['url']
+
+      url = remote_file['url']
+      tmp_file = Tempfile.new(remote_file['file_name'].split('.').first)
+      tmp_file.binmode
+
+      URI.open(url) do |url_file|
+        tmp_file.write(url_file.read)
+      end
+
+      tmp_file.rewind
+      update == true ? actor.update_content(tmp_file) : actor.create_content(tmp_file, from_url: true)
+      tmp_file.close
+    end
+
     def clean_attrs(attrs)
       # avoid the "ArgumentError: Identifier must be a string of size > 0 in order to be treeified" error
       # when setting object.attributes
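Note: a sketch of the attribute shape the remote-file branch above consumes. The keys ('uploaded_files', 'remote_files', 'url', 'file_name') come from the diff itself; the values are illustrative only.

# Illustrative values only
attrs = {
  'uploaded_files' => [42],                  # Hyrax::UploadedFile ids
  'remote_files'   => [
    { 'url' => 'https://example.org/files/page_01.tif', 'file_name' => 'page_01.tif' }
  ]
}
# Each remote file is downloaded to a Tempfile by handle_remote_file and attached
# via FileSetActor#create_content(tmp_file, from_url: true), or update_content on update.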
data/app/jobs/bulkrax/create_relationships_job.rb
CHANGED
@@ -81,7 +81,7 @@ module Bulkrax
     # This is adding the reverse relationship, from the child to the parent
     def collection_parent_work_child
       child_work_ids = child_records[:works].map(&:id)
-      parent_record.reindex_extent
+      parent_record.try(:reindex_extent=, Hyrax::Adapters::NestingIndexAdapter::LIMITED_REINDEX)
 
       parent_record.add_member_objects(child_work_ids)
       ImporterRun.find(importer_run_id).increment!(:processed_relationships, child_work_ids.count) # rubocop:disable Rails/SkipsModelValidations
data/app/jobs/bulkrax/import_work_job.rb
CHANGED
@@ -5,25 +5,27 @@ module Bulkrax
     queue_as :import
 
     # rubocop:disable Rails/SkipsModelValidations
-    def perform(*
-      entry = Entry.find(
+    def perform(entry_id, run_id, *)
+      entry = Entry.find(entry_id)
+      importer_run = ImporterRun.find(run_id)
       entry.build
       if entry.status == "Complete"
-
-
-        ImporterRun.find(args[1]).decrement!(:enqueued_records) unless ImporterRun.find(args[1]).enqueued_records <= 0 # rubocop:disable Style/IdenticalConditionalBranches
+        importer_run.increment!(:processed_records)
+        importer_run.increment!(:processed_works)
       else
         # do not retry here because whatever parse error kept you from creating a work will likely
         # keep preventing you from doing so.
-
-
-        ImporterRun.find(args[1]).decrement!(:enqueued_records) unless ImporterRun.find(args[1]).enqueued_records <= 0 # rubocop:disable Style/IdenticalConditionalBranches
+        importer_run.increment!(:failed_records)
+        importer_run.increment!(:failed_works)
       end
+      # Regardless of completion or not, we want to decrement the enqueued records.
+      importer_run.decrement!(:enqueued_records) unless importer_run.enqueued_records <= 0
+
       entry.save!
-      entry.importer.current_run =
+      entry.importer.current_run = importer_run
       entry.importer.record_status
     rescue Bulkrax::CollectionsCreatedError
-      reschedule(
+      reschedule(entry_id, run_id)
     end
     # rubocop:enable Rails/SkipsModelValidations
 
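Note: with the explicit signature above, callers pass the entry id and importer-run id positionally. A minimal sketch of enqueueing the job; the real call sites live in the parsers, which this diff does not show in full, and `entry` and `importer` are assumed to be in scope:

Bulkrax::ImportWorkJob.perform_later(entry.id, importer.current_run.id)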
data/app/matchers/bulkrax/application_matcher.rb
CHANGED
@@ -30,7 +30,7 @@ module Bulkrax
 
     def process_split
       if self.split.is_a?(TrueClass)
-        @result = @result.split(
+        @result = @result.split(Bulkrax.multi_value_element_split_on)
       elsif self.split
         result = @result.split(Regexp.new(self.split))
         @result = result.map(&:strip)
data/app/models/bulkrax/csv_entry.rb
CHANGED
@@ -18,7 +18,7 @@ module Bulkrax
       raise StandardError, 'CSV path empty' if path.blank?
       CSV.read(path,
                headers: true,
-               header_converters:
+               header_converters: ->(h) { h.to_sym },
                encoding: 'utf-8')
     end
 
@@ -81,7 +81,7 @@ module Bulkrax
     def add_file
       self.parsed_metadata['file'] ||= []
       if record['file']&.is_a?(String)
-        self.parsed_metadata['file'] = record['file'].split(
+        self.parsed_metadata['file'] = record['file'].split(Bulkrax.multi_value_element_split_on)
       elsif record['file'].is_a?(Array)
         self.parsed_metadata['file'] = record['file']
       end
@@ -112,12 +112,16 @@ module Bulkrax
     end
 
     def build_files_metadata
-
-
-
+      # attaching files to the FileSet row only so we don't have duplicates when importing to a new tenant
+      if hyrax_record.work?
+        build_thumbnail_files
+      else
+        file_mapping = key_for_export('file')
+        file_sets = hyrax_record.file_set? ? Array.wrap(hyrax_record) : hyrax_record.file_sets
+        filenames = map_file_sets(file_sets)
 
-
-
+        handle_join_on_export(file_mapping, filenames, mapping['file']&.[]('join')&.present?)
+      end
     end
 
     def build_relationship_metadata
@@ -172,7 +176,7 @@ module Bulkrax
       data = hyrax_record.send(key.to_s)
       if data.is_a?(ActiveTriples::Relation)
         if value['join']
-          self.parsed_metadata[key_for_export(key)] = data.map { |d| prepare_export_data(d) }.join(
+          self.parsed_metadata[key_for_export(key)] = data.map { |d| prepare_export_data(d) }.join(Bulkrax.multi_value_element_join_on).to_s
         else
           data.each_with_index do |d, i|
             self.parsed_metadata["#{key_for_export(key)}_#{i + 1}"] = prepare_export_data(d)
@@ -232,7 +236,7 @@ module Bulkrax
 
     def handle_join_on_export(key, values, join)
       if join
-        parsed_metadata[key] = values.join(
+        parsed_metadata[key] = values.join(Bulkrax.multi_value_element_join_on)
       else
         values.each_with_index do |value, i|
           parsed_metadata["#{key}_#{i + 1}"] = value
@@ -256,7 +260,7 @@ module Bulkrax
       return [] unless parent_field_mapping.present? && record[parent_field_mapping].present?
 
       identifiers = []
-      split_references = record[parent_field_mapping].split(
+      split_references = record[parent_field_mapping].split(Bulkrax.multi_value_element_split_on)
       split_references.each do |c_reference|
         matching_collection_entries = importerexporter.entries.select do |e|
           (e.raw_metadata&.[](source_identifier) == c_reference) &&
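Note: the Bulkrax.multi_value_element_split_on and Bulkrax.multi_value_element_join_on calls above read gem-level settings (the generated initializer template also grows by 10 lines in this release). A minimal sketch of configuring them; the example values are assumptions, not confirmed defaults:

# config/initializers/bulkrax.rb
Bulkrax.setup do |config|
  # Regexp applied when splitting multi-valued import cells
  config.multi_value_element_split_on = /\s*[;|]\s*/
  # String used to join multi-valued fields on export
  config.multi_value_element_join_on = ' | '
end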
data/app/models/bulkrax/importer.rb
CHANGED
@@ -58,17 +58,26 @@ module Bulkrax
 
     # If field_mapping is empty, setup a default based on the export_properties
     def mapping
+      # rubocop:disable Style/IfUnlessModifier
       @mapping ||= if self.field_mapping.blank? || self.field_mapping == [{}]
                      if parser.import_fields.present? || self.field_mapping == [{}]
-
-                       parser.import_fields.reject(&:nil?).map do |m|
-                         Bulkrax.default_field_mapping.call(m)
-                       end.inject(:merge)
-                       )
+                       default_field_mapping
                      end
                    else
-                     self.field_mapping
+                     default_field_mapping.merge(self.field_mapping)
                    end
+
+      # rubocop:enable Style/IfUnlessModifier
+    end
+
+    def default_field_mapping
+      return self.field_mapping if parser.import_fields.nil?
+
+      ActiveSupport::HashWithIndifferentAccess.new(
+        parser.import_fields.reject(&:nil?).map do |m|
+          Bulkrax.default_field_mapping.call(m)
+        end.inject(:merge)
+      )
     end
 
     def parser_fields
@@ -143,17 +152,13 @@ module Bulkrax
       import_objects(['relationship'])
     end
 
+    DEFAULT_OBJECT_TYPES = %w[collection work file_set relationship].freeze
+
     def import_objects(types_array = nil)
       self.only_updates ||= false
-
-
-
-      else
-        types.each do |object_type|
-          self.save if self.new_record? # Object needs to be saved for statuses
-          parser.send("create_#{object_type.pluralize}")
-        end
-      end
+      self.save if self.new_record? # Object needs to be saved for statuses
+      types = types_array || DEFAULT_OBJECT_TYPES
+      parser.create_objects(types)
     rescue StandardError => e
       status_info(e)
     end
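Note: a short usage sketch of the reworked import_objects; passing a subset of types limits the run, while omitting the argument falls back to DEFAULT_OBJECT_TYPES (importer_id is a placeholder):

importer = Bulkrax::Importer.find(importer_id)
importer.import_objects(%w[collection work])  # only collections and works
importer.import_objects                       # collection, work, file_set, relationship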
data/app/models/bulkrax/oai_entry.rb
CHANGED
@@ -59,12 +59,11 @@ module Bulkrax
     def find_collection_ids
       return self.collection_ids if collections_created?
       if sets.blank? || parser.collection_name != 'all'
-        # c = Collection.where(Bulkrax.system_identifier_field => importerexporter.unique_collection_identifier(parser.collection_name)).first
         collection = find_collection(importerexporter.unique_collection_identifier(parser.collection_name))
         self.collection_ids << collection.id if collection.present? && !self.collection_ids.include?(collection.id)
       else # All - collections should exist for all sets
         sets.each do |set|
-          c =
+          c = find_collection(importerexporter.unique_collection_identifier(set.content))
           self.collection_ids << c.id if c.present? && !self.collection_ids.include?(c.id)
         end
       end
data/app/models/concerns/bulkrax/file_set_entry_behavior.rb
CHANGED
@@ -6,7 +6,14 @@ module Bulkrax
       ::FileSet
     end
 
+    def file_reference
+      return 'file' if parsed_metadata&.[]('file')&.map(&:present?)&.any?
+      return 'remote_files' if parsed_metadata&.[]('remote_files')&.map(&:present?)&.any?
+    end
+
     def add_path_to_file
+      return unless file_reference == 'file'
+
       parsed_metadata['file'].each_with_index do |filename, i|
         next if filename.blank?
 
@@ -22,7 +29,7 @@ module Bulkrax
     end
 
     def validate_presence_of_filename!
-      return if parsed_metadata&.[](
+      return if parsed_metadata&.[](file_reference)&.map(&:present?)&.any?
 
       raise StandardError, 'File set must have a filename'
     end
data/app/models/concerns/bulkrax/import_behavior.rb
CHANGED
@@ -165,15 +165,16 @@ module Bulkrax
     end
 
     def factory
-
-
-
-
-
-
-
-
-
+      of = Bulkrax.object_factory || Bulkrax::ObjectFactory
+      @factory ||= of.new(attributes: self.parsed_metadata,
+                          source_identifier_value: identifier,
+                          work_identifier: parser.work_identifier,
+                          related_parents_parsed_mapping: parser.related_parents_parsed_mapping,
+                          replace_files: replace_files,
+                          user: user,
+                          klass: factory_class,
+                          importer_run_id: importerexporter.last_run.id,
+                          update_files: update_files)
     end
 
     def factory_class
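Note: the `Bulkrax.object_factory || Bulkrax::ObjectFactory` lookup above implies the factory class can be swapped out. A hedged sketch, assuming the setting is writable like other Bulkrax options; MyObjectFactory is hypothetical and would need to accept the same keyword arguments shown in the diff (attributes:, source_identifier_value:, work_identifier:, klass:, importer_run_id:, ...):

Bulkrax.setup do |config|
  config.object_factory = MyObjectFactory
end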