datashift 0.13.0 → 0.14.0
- data/README.markdown +36 -66
- data/VERSION +1 -1
- data/lib/applications/jexcel_file.rb +12 -5
- data/lib/datashift.rb +18 -13
- data/lib/datashift/delimiters.rb +0 -1
- data/lib/{guards.rb → datashift/guards.rb} +0 -0
- data/lib/datashift/method_detail.rb +4 -67
- data/lib/datashift/method_details_manager.rb +18 -6
- data/lib/datashift/method_dictionary.rb +55 -38
- data/lib/datashift/method_mapper.rb +18 -14
- data/lib/datashift/populator.rb +259 -6
- data/lib/exporters/csv_exporter.rb +28 -19
- data/lib/exporters/excel_exporter.rb +18 -5
- data/lib/generators/excel_generator.rb +2 -0
- data/lib/loaders/excel_loader.rb +2 -1
- data/lib/loaders/loader_base.rb +53 -142
- data/lib/loaders/paperclip/attachment_loader.rb +1 -1
- data/lib/loaders/paperclip/datashift_paperclip.rb +51 -44
- data/lib/thor/export.thor +65 -0
- data/lib/thor/generate.thor +68 -4
- data/spec/Gemfile +12 -8
- data/spec/Gemfile.lock +93 -93
- data/spec/csv_exporter_spec.rb +50 -12
- data/spec/excel_exporter_spec.rb +35 -3
- data/spec/excel_loader_spec.rb +9 -7
- data/spec/excel_spec.rb +26 -5
- data/spec/{loader_spec.rb → loader_base_spec.rb} +13 -1
- data/spec/method_dictionary_spec.rb +77 -70
- data/spec/paperclip_loader_spec.rb +1 -1
- data/spec/populator_spec.rb +94 -0
- data/spec/thor_spec.rb +1 -1
- metadata +70 -68
data/lib/exporters/excel_exporter.rb
CHANGED
@@ -15,17 +15,30 @@ module DataShift
   require 'excel'
 
   class ExcelExporter < ExporterBase
-
-
+
+    include DataShift::Logging
 
     def initialize(filename)
      @filename = filename
    end
 
-
    # Create an Excel file from list of ActiveRecord objects
-    def export(
-
+    def export(export_records, options = {})
+
+      records = [*export_records]
+
+      puts records, records.inspect
+
+      unless(records && records.size > 0)
+        logger.warn("No objects supplied for export")
+        return
+      end
+
+      first = records[0]
+
+      raise ArgumentError.new('Please supply set of ActiveRecord objects to export') unless(first.is_a?(ActiveRecord::Base))
+
+
      raise ArgumentError.new('Please supply array of records to export') unless records.is_a? Array
 
      excel = Excel.new
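The reworked `export` is worth seeing from the caller's side: it now wraps its argument with `records = [*export_records]`, so a single ActiveRecord object is accepted as well as an array, and it checks that the first entry is an `ActiveRecord::Base`. A minimal sketch, assuming a `Project` model like the one used in the gem's specs:

```ruby
require 'datashift'

# An array of records exports as before
DataShift::ExcelExporter.new('projects.xls').export(Project.all)

# A single record is now accepted too - export wraps it via [*export_records]
DataShift::ExcelExporter.new('first_project.xls').export(Project.first)
```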
data/lib/generators/excel_generator.rb
CHANGED
@@ -25,6 +25,7 @@ module DataShift
 
    # Create an Excel file template (header row) representing supplied Model
    # Options:
+    # * <tt>:filename</tt>
    # * <tt>:autosize</tt> - Autosize all the columns
    #
    def generate(klass, options = {})
@@ -51,6 +52,7 @@ module DataShift
    # and it's associations
    #
    # Options:
+    # * <tt>:filename</tt>
    # * <tt>:autosize</tt> - Autosize all the columns
    #
    # * <tt>:exclude</tt> - Association TYPE(s) to exclude.
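Both template generators now document a `:filename` option alongside `:autosize`. A short sketch of generating templates, assuming the generator is constructed with a target filename (as the exporter above is) and that the association-aware variant keeps its `generate_with_associations` name:

```ruby
require 'datashift'

generator = DataShift::ExcelGenerator.new('project_template.xls')

# Header-row template for the model's own columns
generator.generate(Project, :autosize => true)

# Template that also includes association columns, excluding belongs_to associations
generator.generate_with_associations(Project, :exclude => :belongs_to)
```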
data/lib/loaders/excel_loader.rb
CHANGED
@@ -132,10 +132,11 @@ module DataShift
 
          if(verbose)
            puts "Failed to process row [#{i}] (#{@current_row})"
-            puts e.inspect
+            puts e.inspect, e.backtrace
          end
 
          logger.error "Failed to process row [#{i}] (#{@current_row})"
+          logger.error e.backtrace
 
          # don't forget to reset the load object
          new_load_object
data/lib/loaders/loader_base.rb
CHANGED
@@ -18,7 +18,6 @@ module DataShift
   class LoaderBase
 
     include DataShift::Logging
-    include DataShift::Populator
     include DataShift::Querying
 
     attr_reader :headers
@@ -26,10 +25,10 @@ module DataShift
     attr_accessor :method_mapper
 
     attr_accessor :load_object_class, :load_object
-    attr_accessor :current_value, :current_method_detail
 
     attr_accessor :reporter
-
+    attr_accessor :populator
+
     attr_accessor :config, :verbose
 
     def options() return @config; end
@@ -50,10 +49,20 @@ module DataShift
    def initialize(object_class, find_operators = true, object = nil, options = {})
      @load_object_class = object_class
 
+      @populator = if(options[:populator].is_a?(String))
+        ::Object.const_get(options[:populator]).new
+      elsif(options[:populator].is_a?(Class))
+        options[:populator].new
+      else
+        DataShift::Populator.new
+      end
+
      # Gather names of all possible 'setter' methods on AR class (instance variables and associations)
      if((find_operators && !MethodDictionary::for?(object_class)) || options[:reload])
        #puts "DEBUG Building Method Dictionary for class #{object_class}"
-
+
+        meth_dict_opts = options.extract!(:reload, :instance_methods)
+        DataShift::MethodDictionary.find_operators( @load_object_class, meth_dict_opts)
 
        # Create dictionary of data on all possible 'setter' methods which can be used to
        # populate or integrate an object of type @load_object_class
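`initialize` now builds the loader's `Populator`, and `options[:populator]` lets callers swap in their own, supplied either as a class or as a class name string. A minimal sketch under that assumption, again using a hypothetical `Project` model:

```ruby
require 'datashift'

# A custom populator can hook value preparation before assignment
class AuditedPopulator < DataShift::Populator
  def prepare_data(method_detail, value)
    puts "Preparing #{method_detail.operator} => #{value.inspect}"
    super
  end
end

# Either form selects the custom populator
loader = DataShift::LoaderBase.new(Project, true, nil, :populator => AuditedPopulator)
loader = DataShift::LoaderBase.new(Project, true, nil, :populator => 'AuditedPopulator')
```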
@@ -67,15 +76,7 @@ module DataShift
 
      puts "Verbose Mode" if(verbose)
      @headers = []
-
-      @default_data_objects ||= {}
-
-      @default_values  = {}
-      @override_values = {}
-
-      @prefixes  = {}
-      @postfixes = {}
-
+
      @reporter = DataShift::Reporter.new
 
      reset(object)
@@ -136,9 +137,13 @@ module DataShift
    #
    # [:force_inclusion] : List of columns that do not map to any operator but should be includeed in processing.
    #
-    #   This provides the opportunity for
-    #
+    #   This provides the opportunity for :
+    #
+    #   1) loaders to provide specific methods to handle these fields, when no direct operator
+    #      is available on the model or it's associations
    #
+    #   2) Handle delegated methods i.e no direct association but method is on a model throuygh it's delegate
+    #
    # [:include_all] : Include all headers in processing - takes precedence of :force_inclusion
    #
    def populate_method_mapper_from_headers( headers, options = {} )
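The expanded comment block describes `:force_inclusion` (keep named headers that map to no operator, so a loader subclass or a delegated method can handle them) and `:include_all` (keep every header). Continuing with the loader sketched above, and with illustrative column names:

```ruby
headers = ['value_as_string', 'legacy_code', 'owner_email']

# Keep the two non-model columns in processing for a subclass to handle
loader.populate_method_mapper_from_headers(headers,
  :force_inclusion => ['legacy_code', 'owner_email'])

# Or keep everything - :include_all takes precedence over :force_inclusion
loader.populate_method_mapper_from_headers(headers, :include_all => true)
```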
@@ -174,9 +179,9 @@ module DataShift
    # the incoming import format
    def process_missing_columns_with_defaults()
      inbound_ops = @method_mapper.operator_names
-      @default_values.each do |dn, dv|
+      @populator.default_values.each do |dn, dv|
        logger.debug "Processing default value #{dn} : #{dv}"
-        assignment(dn, @load_object, dv) unless(inbound_ops.include?(dn))
+        @populator.assignment(dn, @load_object, dv) unless(inbound_ops.include?(dn))
      end
    end
 
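Default values now live on the populator rather than in loader instance variables; `process_missing_columns_with_defaults` simply walks `@populator.default_values`. A sketch, assuming `default_values` is exposed as a plain, mutable Hash of operator name to value (this diff only shows it being read):

```ruby
# Columns absent from the inbound file are filled from these defaults
# (assumes default_values is a writable Hash - not shown in this diff)
loader.populator.default_values['value_as_string'] = 'Default project value'

loader.populator.default_values.each do |operator, value|
  puts "default #{operator} => #{value}"
end
```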
@@ -200,8 +205,6 @@ module DataShift
    end
 
 
-    # Default values and over rides can be provided in YAML config file.
-    #
    # Any Config under key 'LoaderBase' is merged over existing options - taking precedence.
    #
    # Any Config under a key equal to the full name of the Loader class (e.g DataShift::SpreeHelper::ImageLoader)
@@ -211,55 +214,13 @@ module DataShift
    #
    # LoaderClass:
    #     option: value
-    #
-    # Load Class:    (e.g Spree:Product)
-    #     datashift_defaults:
-    #       value_as_string: "Default Project Value"
-    #       category: reference:category_002
-    #
-    #     datashift_overrides:
-    #       value_as_double: 99.23546
    #
-    def configure_from(
+    def configure_from(yaml_file)
 
      data = YAML::load( File.open(yaml_file) )
 
-      # TODO - MOVE DEFAULTS TO OWN MODULE
-      # decorate the loading class with the defaults/ove rides to manage itself
-      # IDEAS .....
-      #
-      #unless(@default_data_objects[load_object_class])
-      #
-      #  @default_data_objects[load_object_class] = load_object_class.new
-
-      #  default_data_object = @default_data_objects[load_object_class]
-
-
-      #  default_data_object.instance_eval do
-      #    def datashift_defaults=(hash)
-      #      @datashift_defaults = hash
-      #    end
-      #    def datashift_defaults
-      #      @datashift_defaults
-      #    end
-      #  end unless load_object_class.respond_to?(:datashift_defaults)
-      #end
-
-      #puts load_object_class.new.to_yaml
-
      logger.info("Read Datashift loading config: #{data.inspect}")
-
-      if(data[load_object_class.name])
 
-        logger.info("Assigning defaults and over rides from config")
-
-        deflts = data[load_object_class.name]['datashift_defaults']
-        @default_values.merge!(deflts) if deflts
-
-        ovrides = data[load_object_class.name]['datashift_overrides']
-        @override_values.merge!(ovrides) if ovrides
-      end
-
      if(data['LoaderBase'])
        @config.merge!(data['LoaderBase'])
      end
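`configure_from` no longer merges `datashift_defaults` / `datashift_overrides` itself; as the next hunk shows, it hands the whole file to `@populator.configure_from` and keeps only the `LoaderBase` and loader-class sections for `@config`. A sketch of a config file using the keys from the removed comments, with the hypothetical `Project` model again:

```ruby
require 'datashift'

File.write('project_load.yml', <<-YAML)
Project:
  datashift_defaults:
    value_as_string: "Default Project Value"
    category: reference:category_002
  datashift_overrides:
    value_as_double: 99.23546
YAML

loader = DataShift::LoaderBase.new(Project)
loader.configure_from('project_load.yml')   # defaults/overrides now end up on loader.populator
```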
@@ -268,32 +229,18 @@ module DataShift
        @config.merge!(data[self.class.name])
      end
 
+      @populator.configure_from(load_object_class, yaml_file)
      logger.info("Loader Options : #{@config.inspect}")
    end
 
    # Set member variables to hold details and value.
    #
    # Check supplied value, validate it, and if required :
-    #   set to
-    #   prepend
+    #   set to provided default value
+    #   prepend any provided prefixes
+    #   add any provided postfixes
    def prepare_data(method_detail, value)
-
-      @current_value = value
-
-      @current_method_detail = method_detail
-
-      operator = method_detail.operator
-
-      override_value(operator)
-
-      if((value.nil? || value.to_s.empty?) && default_value(operator))
-        @current_value = default_value(operator)
-      end
-
-      @current_value = "#{prefixes(operator)}#{@current_value}" if(prefixes(operator))
-      @current_value = "#{@current_value}#{postfixes(operator)}" if(postfixes(operator))
-
-      @current_value
+      return @populator.prepare_data(method_detail, value)
    end
 
    # Return the find_by operator and the rest of the (row,columns) data
@@ -310,11 +257,11 @@ module DataShift
      #puts "DEBUG inbound_data: #{inbound_data} => #{operator} , #{rest}"
 
      # Find by operator embedded in row takes precedence over operator in column heading
-      if(@current_method_detail.find_by_operator)
+      if(@populator.current_method_detail.find_by_operator)
        # row contains 0.99 so rest is effectively operator, and operator is in method details
        if(rest.nil?)
          rest = operator
-          operator = @current_method_detail.find_by_operator
+          operator = @populator.current_method_detail.find_by_operator
        end
      end
 
@@ -330,11 +277,14 @@ module DataShift
    #
    def process()
 
-
+      current_method_detail = @populator.current_method_detail
+      current_value = @populator.current_value
+
+      logger.info("Current value to assign : #{current_value}")
 
-      if(
+      if(current_method_detail.operator_for(:has_many))
 
-        if(
+        if(current_method_detail.operator_class && current_value)
 
          # there are times when we need to save early, for example before assigning to
          # has_and_belongs_to associations which require the load_object has an id for the join table
@@ -343,7 +293,7 @@ module DataShift
 
          # A single column can contain multiple associations delimited by special char
          # Size:large|Colour:red,green,blue => ['Size:large', 'Colour:red,green,blue']
-          columns =
+          columns = current_value.to_s.split( Delimiters::multi_assoc_delim )
 
          # Size:large|Colour:red,green,blue => generates find_by_size( 'large' ) and find_all_by_colour( ['red','green','blue'] )
 
@@ -355,25 +305,26 @@ module DataShift
 
            find_by_values = col_values.split(Delimiters::multi_value_delim)
 
-            find_by_values <<
+            find_by_values << current_method_detail.find_by_value if(current_method_detail.find_by_value)
 
            if(find_by_values.size > 1)
 
-
+              #RAILS 4 current_value = current_method_detail.operator_class.send("find_all_by_#{find_operator}", find_by_values )
+              current_value = current_method_detail.operator_class.where(find_operator => find_by_values)
 
-              unless(find_by_values.size ==
-                found =
-                @load_object.errors.add(
-                puts "WARNING: Association #{
+              unless(find_by_values.size == current_value.size)
+                found = current_value.collect {|f| f.send(find_operator) }
+                @load_object.errors.add( current_method_detail.operator, "Association with key(s) #{(find_by_values - found).inspect} NOT found")
+                puts "WARNING: Association #{current_method_detail.operator} with key(s) #{(find_by_values - found).inspect} NOT found - Not added."
                next if(@current_value.empty?)
              end
 
            else
 
-
+              current_value = current_method_detail.operator_class.send("find_by_#{find_operator}", find_by_values )
 
-              unless(
-                @load_object.errors.add(
+              unless(current_value)
+                @load_object.errors.add( current_method_detail.operator, "Association with key #{find_by_values} NOT found")
                puts "WARNING: Association with key #{find_by_values} NOT found - Not added."
                next
              end
@@ -381,14 +332,14 @@ module DataShift
            end
 
            # Lookup Assoc's Model done, now add the found value(s) to load model's collection
-            @
+            @populator.assign(current_method_detail, @load_object, current_value)
          end
        end
        # END HAS_MANY
      else
        # Nice n simple straight assignment to a column variable
        #puts "INFO: LOADER BASE processing #{method_detail.name}"
-        @
+        @populator.assign(current_method_detail, @load_object, current_value)
      end
    end
 
@@ -401,7 +352,7 @@ module DataShift
      if(object)
        @reporter.add_failed_object(object)
 
-
+        object.destroy if(rollback && object.respond_to?('destroy') && !object.new_record?)
 
        new_load_object # don't forget to reset the load object
      end
@@ -419,53 +370,13 @@ module DataShift
        logger.error e.backtrace
        raise "Error in save whilst processing column #{@current_method_detail.name}" if(@config[:strict])
      end
-    end
-
-    def self.default_object_for( klass )
-      @default_data_objects ||= {}
-      @default_data_objects[klass]
-    end
-
-    def set_default_value( name, value )
-      @default_values[name] = value
-    end
-
-    def set_override_value( operator, value )
-      @override_values[operator] = value
-    end
-
-    def default_value(name)
-      @default_values[name]
-    end
-
-    def override_value( operator )
-      @current_value = @override_values[operator] if(@override_values[operator])
-    end
-
-
-    def set_prefix( name, value )
-      @prefixes[name] = value
-    end
-
-    def prefixes(name)
-      @prefixes[name]
-    end
-
-    def set_postfix( name, value )
-      @postfixes[name] = value
-    end
-
-    def postfixes(name)
-      @postfixes[name]
-    end
-
+    end
 
    # Reset the loader, including database object to be populated, and load counts
    #
    def reset(object = nil)
      @load_object = object || new_load_object
      @reporter.reset
-      @current_value = nil
    end
 
 
@@ -475,7 +386,7 @@ module DataShift
    end
 
    def abort_on_failure?
-      @config[:abort_on_failure] == 'true'
+      @config[:abort_on_failure].to_s == 'true'
    end
 
    def loaded_count
@@ -513,7 +424,7 @@ module DataShift
    # Supported Syntax :
    #  assoc_find_name:value | assoc2_find_name:value | etc
    def get_each_assoc
-      current_value.to_s.split( Delimiters::multi_assoc_delim )
+      @populator.current_value.to_s.split( Delimiters::multi_assoc_delim )
    end
 
    private
data/lib/loaders/paperclip/attachment_loader.rb
CHANGED
@@ -124,7 +124,7 @@ module DataShift
      # try splitting up filename in various ways looking for the attachment owqner
      split_on = @config['split_file_name_on'] || options[:split_file_name_on]
 
-      @loading_files_cache = Paperclip::get_files(path, options)
+      @loading_files_cache = DataShift::Paperclip::get_files(path, options)
 
      puts "Found #{loading_files_cache.size} files - splitting names on delimiter [#{split_on}]"
 
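The attachment loader now calls the helper through its full `DataShift::Paperclip` namespace; the `datashift_paperclip.rb` changes below also make `get_files` return a single-element list when handed one file, and close the attachment file handle via new `ensure` blocks in `create_attachment`. A small usage sketch, with hypothetical paths and a hypothetical Paperclip-backed `Image` model:

```ruby
require 'datashift'

# A directory is globbed (optionally recursively, with a custom glob) ...
images = DataShift::Paperclip.get_files('spec/fixtures/images', :recursive => true, :glob => '*.jpg')

# ... and a single file path now comes straight back as a one-element list
images += DataShift::Paperclip.get_files('spec/fixtures/images/logo.png')

# create_attachment is an instance method of the module
include DataShift::Paperclip

images.each do |path|
  create_attachment(Image, path)
end
```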
data/lib/loaders/paperclip/datashift_paperclip.rb
CHANGED
@@ -4,114 +4,121 @@
 # License:: MIT. Free, Open Source.
 #
 # Details:: Module containing common functionality for working with Paperclip attachments
-#
+#
 require 'logging'
 require 'paperclip'
 
 module DataShift
 
   module Paperclip
-
+
     include DataShift::Logging
-
+
     require 'paperclip/attachment_loader'
-
+
     attr_accessor :attachment
-
+
     # Get all image files (based on file extensions) from supplied path.
-    # Options :
+    # Options :
     #   :glob : The glob to use to find files
     # =>  :recursive : Descend tree looking for files rather than just supplied path
-
+
     def self.get_files(path, options = {})
+      return [path] if(File.file?(path))
      glob = options[:glob] ? options[:glob] : '*.*'
      glob = (options['recursive'] || options[:recursive]) ? "**/#{glob}" : glob
-
+
      Dir.glob("#{path}/#{glob}", File::FNM_CASEFOLD)
    end
-
+
    def get_file( attachment_path )
-
+
      unless File.exists?(attachment_path) && File.readable?(attachment_path)
        logger.error("Cannot process Image from #{Dir.pwd}: Invalid Path #{attachment_path}")
        raise PathError.new("Cannot process Image : Invalid Path #{attachment_path}")
      end
-
+
      file = begin
        File.new(attachment_path, "rb")
      rescue => e
        puts e.inspect
        raise PathError.new("ERROR : Failed to read image from #{attachment_path}")
      end
-
+
      file
    end
-
+
    # Note the paperclip attachment model defines the storage path via something like :
    # => :path => ":rails_root/public/blah/blahs/:id/:style/:basename.:extension"
-    #
-    # Options
-    #
+    #
+    # Options
+    #
    #   :attributes
-    #
+    #
    #     Pass through a hash of attributes to the Paperclip klass's initializer
-    #
+    #
    #   :has_attached_file_name
-    #
-    #     Paperclip attachment name defined with macro 'has_attached_file :name'
-    #
+    #
+    #     Paperclip attachment name defined with macro 'has_attached_file :name'
+    #
    #     This is usually called/defaults :attachment
-    #
-    #     e.g
-    #     When : has_attached_file :avatar
-    #
+    #
+    #     e.g
+    #     When : has_attached_file :avatar
+    #
    #     Give : {:has_attached_file_attribute => :avatar}
-    #
-    #     When : has_attached_file :icon
+    #
+    #     When : has_attached_file :icon
    #
    #     Give : { :has_attached_file_attribute => :icon }
-    #
+    #
    def create_attachment(klass, attachment_path, record = nil, attach_to_record_field = nil, options = {})
-
+
      has_attached_file_attribute = options[:has_attached_file_name] ? options[:has_attached_file_name].to_sym : :attachment
-
+
      # e.g (:attachment => File.read)
-
-
+
+      attachment_file = get_file(attachment_path)
+      paperclip_attributes = { has_attached_file_attribute => attachment_file }
+
      paperclip_attributes.merge!(options[:attributes]) if(options[:attributes])
-
-      begin
-        @attachment = klass.new(paperclip_attributes, :without_protection => true)
+
+      begin
+        @attachment = klass.new(paperclip_attributes, :without_protection => true)
      rescue => e
        puts e.inspect
        logger.error("Failed to create PaperClip Attachment : #{e.inspect}")
        raise CreateAttachmentFailed.new("Failed to create PaperClip Attachment from : #{attachment_path}")
+      ensure
+        attachment_file.close unless attachment_file.closed?
      end
-
+
      begin
-
+
        if(@attachment.save)
          puts "Success: Created Attachment #{@attachment.id} : #{@attachment.attachment_file_name}"
-
+
          if(attach_to_record_field.is_a? MethodDetail)
-
+            DataShift::Populator.new().assign(attach_to_record_field, record, @attachment)
          else
-            # assume its not a has_many and try basic send
+            # assume its not a has_many and try basic send
            record.send("#{attach_to_record_field}=", @attachment)
          end if(record && attach_to_record_field)
-
+
        else
          puts "ERROR : Problem saving to DB : #{@attachment.inspect}"
          puts @attachment.errors.messages.inspect
        end
-
+
        @attachment
      rescue => e
        logger.error("Problem saving Paperclip Attachment: #{e.inspect}")
        puts e.inspect
        raise CreateAttachmentFailed.new("PaperClip error - Problem saving Attachment")
+      ensure
+        attachment_file.close unless attachment_file.closed?
      end
-    end
+    end
    end
-
+
  end