bulk_ops 0.1.18 → 0.1.19

Sign up to get free protection for your applications and access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: a60889c5673952113a76b07c00b17b6ac5f99a4a2a3ac1099b3e2cb58ac04fd7
4
- data.tar.gz: f7e2488b18b09092e90ef1fb3dad38e460d75bdfb355942065be98e28e743008
3
+ metadata.gz: 980d5fec0bcc94d2199356f379adeb0cee99a748b159a5b7d2934e26bdf53c9d
4
+ data.tar.gz: c4c6ef9e463d35fd408ac6c477a47bee55eb7833c374269e4bc3a98d881713b2
5
5
  SHA512:
6
- metadata.gz: df3fc8ea76aae33515e73202d16a73ddbd86a3ae3c7a73245ec303c7599751b8c8e9961c1deee5971339cebd5bb9cd6ece60a962bebc6747c55244002a212326
7
- data.tar.gz: cc1f2e6360da271a4203c5a6ccc6c67c793f7c5f5ce13b671ad79532c47a9044e76db9131af18090885b06b17aec9960a828db7bb2417a1d82a9afc6df90c6ba
6
+ metadata.gz: d9511dad25f5d1f489ad08415b7750fd2264d214b057ea9b2af0fbd2ee83998cbb25f631b8e1432b958bd0a90c85a4507cbc363e21316e981090aa6b42697215
7
+ data.tar.gz: 90df61d91db0671690c2fc8199d75feadea833c5ee23e8e4778c855f7a36297cec3ea191741b33d1ad1409e4d43020888fc37f037280de7f0578b9cda285159d
@@ -97,7 +97,7 @@ module BulkOps
97
97
  # make sure the work proxies we just created are loaded in memory
98
98
  reload
99
99
  #loop through the work proxies to create a job for each work
100
- @metadata.dup.each_with_index do |values,row_number|
100
+ @metadata.each_with_index do |values,row_number|
101
101
  proxy = work_proxies.find_by(row_number: row_number)
102
102
  proxy.update(message: "interpreted at #{DateTime.now.strftime("%d/%m/%Y %H:%M")} " + proxy.message)
103
103
  data = BulkOps::Parser.new(proxy, @metadata).interpret_data(raw_row: values)
@@ -20,7 +20,7 @@ class BulkOps::Parser
20
20
  def initialize prx, metadata_sheet=nil
21
21
  @proxy = prx
22
22
  @raw_data = (metadata_sheet || proxy.operation.metadata)
23
- @raw_row = @raw_data[@proxy.row_number].dup
23
+ @raw_row = @raw_data[@proxy.row_number]
24
24
  @metadata = {}
25
25
  @parsing_errors = []
26
26
  end
@@ -29,13 +29,13 @@ class BulkOps::Parser
29
29
  @raw_row = raw_row if raw_row.present?
30
30
  @proxy = proxy if proxy.present?
31
31
  @raw_data = raw_data if raw_data.present?
32
+ disambiguate_columns
32
33
  setAdminSet
33
34
  #The order here matters a little: interpreting the relationship fields specifies containing collections,
34
35
  # which may have opinions about whether we should inherit metadata from parent works
35
36
  interpret_relationship_fields
36
37
  setMetadataInheritance
37
38
  interpret_option_fields
38
- disambiguate_columns
39
39
  interpret_file_fields
40
40
  interpret_controlled_fields
41
41
  interpret_scalar_fields
@@ -54,8 +54,7 @@ class BulkOps::Parser
54
54
  # separate values in identical columns using the separator
55
55
  row[header] = (Array(row[header]) << value).join(BulkOps::SEPARATOR)
56
56
  end
57
- #return a hash with identical columns merged
58
- return row
57
+ @raw_row = row
59
58
  end
60
59
 
61
60
  def interpret_controlled_fields
@@ -67,7 +66,6 @@ class BulkOps::Parser
67
66
  # This hash is populated with relevant data as we loop through the fields
68
67
  controlled_data = {}
69
68
 
70
- row = @raw_row.dup
71
69
  @raw_row.each do |field_name, value|
72
70
  next if value.blank? or field_name.blank?
73
71
  field_name = field_name.to_s
@@ -122,10 +120,8 @@ class BulkOps::Parser
122
120
  row_number: row_number) unless value_id
123
121
  end
124
122
  controlled_data[field_name_norm] << {id: value_id, remove: field_name.downcase.starts_with?("remove")}
125
- row.delete(field_name)
126
123
  end
127
124
  end
128
- @raw_row = row
129
125
 
130
126
  # Actually add all the data
131
127
  controlled_data.each do |property_name, data|
@@ -139,8 +135,7 @@ class BulkOps::Parser
139
135
  end
140
136
 
141
137
  def interpret_scalar_fields
142
- row = @raw_row.dup
143
- @raw_row.each do |field, values|
138
+ @raw_row.each do |field, values|
144
139
  next if values.blank? or field.nil? or field == values
145
140
  # get the field name, if this column is a metadata field
146
141
  next unless field_name = find_field_name(field.to_s)
@@ -152,11 +147,9 @@ class BulkOps::Parser
152
147
  value = value.strip.encode('utf-8', :invalid => :replace, :undef => :replace, :replace => '_') unless value.blank?
153
148
  value = unescape_csv(value)
154
149
  (@metadata[field_name] ||= []) << value
155
- row.delete(field)
156
- end
150
+ end
157
151
  end
158
- @raw_row = row
159
- end
152
+ end
160
153
 
161
154
  def interpret_file_fields
162
155
  # This method handles file additions and deletions from the spreadsheet
@@ -165,7 +158,6 @@ class BulkOps::Parser
165
158
  # by the BulkOps::Operation.
166
159
  #
167
160
 
168
- row = @raw_row.dup
169
161
  @raw_row.each do |field, value|
170
162
  next if value.blank? or field.blank?
171
163
  field = field.to_s
@@ -187,7 +179,6 @@ class BulkOps::Parser
187
179
  begin
188
180
  uploaded_file = Hyrax::UploadedFile.create(file: File.open(filepath), user: operation.user)
189
181
  (@metadata[:uploaded_files] ||= []) << uploaded_file.id unless uploaded_file.id.nil?
190
- row.delete(field)
191
182
  rescue Exception => e
192
183
  report_error(:upload_error,
193
184
  message: "Error opening file: #{ filepath } -- #{e}",
@@ -213,11 +204,9 @@ class BulkOps::Parser
213
204
  end
214
205
 
215
206
  end
216
- @raw_row = row
217
207
  end
218
208
 
219
209
  def interpret_option_fields
220
- row = @raw_row.dup
221
210
  @raw_row.each do |field,value|
222
211
  next if value.blank? or field.blank?
223
212
  field = field.to_s
@@ -226,11 +215,10 @@ class BulkOps::Parser
226
215
  normfield = field.downcase.parameterize.gsub(/[_\s-]/,'')
227
216
  if ["visibility", "public"].include?(normfield)
228
217
  @proxy.update(visibility: format_visibility(value))
229
- row.delete(field)
218
+
230
219
  end
231
220
  if ["worktype","model","type"].include?(normfield)
232
221
  @proxy.update(work_type: format_worktype(value) )
233
- row.delete(field)
234
222
  end
235
223
  if ["referenceidentifier",
236
224
  "referenceid",
@@ -245,14 +233,11 @@ class BulkOps::Parser
245
233
  "relid",
246
234
  "relidtype"].include?(normfield)
247
235
  @proxy.update(reference_identifier: format_reference_id(value))
248
- row.delete(field)
249
236
  end
250
237
  end
251
- @raw_row = row
252
238
  end
253
239
 
254
240
  def interpret_relationship_fields
255
- row = @raw_row.dup
256
241
  @raw_row.each do |field,value|
257
242
  next if value.blank? or field.blank?
258
243
  field = field.to_s
@@ -274,14 +259,12 @@ class BulkOps::Parser
274
259
  when "order"
275
260
  # If the field specifies the object's order among siblings
276
261
  @proxy.update(order: value.to_f)
277
- row.delete(field)
278
262
  next
279
263
  when "collection"
280
264
  # If the field specifies the name or ID of a collection,
281
265
  # find or create the collection and update the metadata to match
282
266
  col = find_or_create_collection(value)
283
267
  ( @metadata[:member_of_collection_ids] ||= [] ) << col.id if col
284
- row.delete field
285
268
  next
286
269
  when "parent", "child"
287
270
 
@@ -306,10 +289,8 @@ class BulkOps::Parser
306
289
  end
307
290
  end
308
291
  BulkOps::Relationship.create(relationship_parameters)
309
- row.delete field
310
292
  end
311
293
  end
312
- @raw_row = row
313
294
  end
314
295
 
315
296
  def self.normalize_relationship_field_name field
@@ -1,3 +1,3 @@
1
1
  module BulkOps
2
- VERSION = "0.1.18"
2
+ VERSION = "0.1.19"
3
3
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: bulk_ops
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.18
4
+ version: 0.1.19
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ned Henry, UCSC Library Digital Initiatives
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2019-10-15 00:00:00.000000000 Z
11
+ date: 2019-10-16 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: rails