datashift 0.10.1 → 0.10.2

Sign up to get free protection for your applications and to get access to all the features.
Files changed (44) hide show
  1. data/Rakefile +6 -1
  2. data/VERSION +1 -1
  3. data/datashift.gemspec +13 -6
  4. data/lib/datashift.rb +2 -20
  5. data/lib/datashift/exceptions.rb +2 -0
  6. data/lib/datashift/method_detail.rb +15 -29
  7. data/lib/datashift/method_dictionary.rb +36 -21
  8. data/lib/datashift/method_mapper.rb +56 -16
  9. data/lib/datashift/populator.rb +23 -0
  10. data/lib/datashift/querying.rb +86 -0
  11. data/lib/generators/csv_generator.rb +1 -4
  12. data/lib/generators/excel_generator.rb +28 -11
  13. data/lib/generators/generator_base.rb +12 -0
  14. data/lib/loaders/csv_loader.rb +9 -3
  15. data/lib/loaders/excel_loader.rb +14 -6
  16. data/lib/loaders/loader_base.rb +38 -125
  17. data/lib/loaders/paperclip/attachment_loader.rb +130 -62
  18. data/lib/loaders/paperclip/datashift_paperclip.rb +46 -12
  19. data/lib/loaders/paperclip/image_loading.rb +25 -41
  20. data/lib/thor/generate.thor +16 -6
  21. data/lib/thor/paperclip.thor +25 -5
  22. data/spec/Gemfile +3 -2
  23. data/spec/MissingAttachmentRecords/DEMO_001_ror_bag.jpeg +0 -0
  24. data/spec/{fixtures/images/DEMO_002_Powerstation.jpg → MissingAttachmentRecords/DEMO_002_Powerstation.jpeg} +0 -0
  25. data/spec/MissingAttachmentRecords/DEMO_002_Powerstation.jpg +0 -0
  26. data/spec/MissingAttachmentRecords/DEMO_003_ror_mug.jpeg +0 -0
  27. data/spec/MissingAttachmentRecords/DEMO_004_ror_ringer.jpeg +0 -0
  28. data/spec/excel_generator_spec.rb +28 -0
  29. data/spec/excel_loader_spec.rb +12 -17
  30. data/spec/fixtures/config/database.yml +1 -1
  31. data/spec/fixtures/db/datashift_test_models_db.sqlite +0 -0
  32. data/spec/fixtures/db/migrate/20121009161700_add_digitals.rb +24 -0
  33. data/spec/fixtures/images/DEMO_002_Powerstation.jpeg +0 -0
  34. data/spec/fixtures/models/digital.rb +14 -0
  35. data/spec/fixtures/models/owner.rb +5 -3
  36. data/spec/fixtures/test_model_defs.rb +4 -62
  37. data/spec/loader_spec.rb +42 -50
  38. data/spec/method_dictionary_spec.rb +3 -10
  39. data/spec/method_mapper_spec.rb +79 -20
  40. data/spec/paperclip_loader_spec.rb +95 -0
  41. data/spec/spec_helper.rb +44 -8
  42. metadata +236 -224
  43. data/lib/helpers/rake_utils.rb +0 -42
  44. data/spec/fixtures/models/test_model_defs.rb +0 -67
@@ -18,6 +18,29 @@ module DataShift
18
18
  @insistent_method_list
19
19
  end
20
20
 
21
+ def self.insistent_assignment( record, value, operator )
22
+
23
+ #puts "DEBUG: RECORD CLASS #{record.class}"
24
+ op = operator + '='
25
+
26
+ begin
27
+ record.send(op, value)
28
+ rescue => e
29
+ Populator::insistent_method_list.each do |f|
30
+ begin
31
+ record.send(op, value.send( f) )
32
+ break
33
+ rescue => e
34
+ #puts "DEBUG: insistent_assignment: #{e.inspect}"
35
+ if f == Populator::insistent_method_list.last
36
+ puts "I'm sorry I have failed to assign [#{value}] to #{operator}"
37
+ raise "I'm sorry I have failed to assign [#{value}] to #{operator}" unless value.nil?
38
+ end
39
+ end
40
+ end
41
+ end
42
+ end
43
+
21
44
  def assignment( operator, record, value )
22
45
  #puts "DEBUG: RECORD CLASS #{record.class}"
23
46
  op = operator + '=' unless(operator.include?('='))
@@ -0,0 +1,86 @@
1
+ # Copyright:: (c) Autotelik Media Ltd 2011
2
+ # Author :: Tom Statter
3
+ # Date :: Aug 2010
4
+ # License:: MIT
5
+ #
6
+ # Details:: Base class for loaders, providing a process hook which populates a model,
7
+ # based on a method map and supplied value from a file - i.e a single column/row's string value.
8
+ # Note that although a single column, the string can be formatted to contain multiple values.
9
+ #
10
+ # Tightly coupled with MethodMapper classes (in lib/engine) which contains full details of
11
+ # a file's column and it's correlated AR associations.
12
+ #
13
+ module DataShift
14
+
15
+ require 'datashift/method_mapper'
16
+
17
+ module Querying
18
+
19
+ def search_for_record(klazz, field, search_term, options = {})
20
+
21
+ begin
22
+
23
+ if(options[:case_sensitive])
24
+ return klazz.send("find_by_#{field}", search_term)
25
+ elsif(options[:use_like])
26
+ return klazz.where("#{field} like ?", "#{search_term}%").first
27
+ else
28
+ return klazz.where("lower(#{field}) = ?", search_term.downcase).first
29
+ end
30
+
31
+ rescue => e
32
+ puts e.inspect
33
+ logger.error("Exception attempting to find a record for [#{search_term}] on #{klazz}.#{field}")
34
+ logger.error e.backtrace
35
+ logger.error e.inspect
36
+ end
37
+
38
+ nil
39
+ end
40
+
41
+ # Find a record for model klazz, looking up on field containing search_terms
42
+ # Responds to global Options :
43
+ # :case_sensitive : Default is a case insensitive lookup.
44
+ # :use_like : Attempts a lookup using like and x% rather than equality
45
+ #
46
+ # Returns nil if no record found
47
+ def get_record_by(klazz, field, search_term, split_on = ' ', split_on_prefix = nil)
48
+
49
+ begin
50
+
51
+ record = search_for_record(klazz, field, search_term)
52
+
53
+ # try individual portions of search_term, front -> back i.e "A_B_C_D" => A, B, C etc
54
+ search_term.split(split_on).each do |str|
55
+ z = (split_on_prefix) ? "#{split_on_prefix}#{str}": str
56
+ record = search_for_record(klazz, field, z)
57
+ break if record
58
+ end unless(record)
59
+
60
+ # this time try incrementally scanning i.e "A_B_C_D" => A, A_B, A_B_C etc
61
+ search_term.split(split_on).inject("") do |str, term|
62
+ z = (split_on_prefix) ? "#{split_on_prefix}#{str}#{split_on}#{term}": "#{str}#{split_on}#{term}"
63
+ record = search_for_record(klazz, field, z)
64
+ break if record
65
+ term
66
+ end unless(record)
67
+
68
+ return record
69
+ rescue => e
70
+ logger.error("Exception attempting to find a record for [#{search_term}] on #{klazz}.#{field}")
71
+ logger.error e.backtrace
72
+ logger.error e.inspect
73
+ return nil
74
+ end
75
+ end
76
+
77
+ def get_record_by!(klazz, field, search_terms, split_on = ' ', split_on_prefix = nil)
78
+ x = get_record_by(klazz, field, search_terms, split_on, split_on_prefix)
79
+
80
+ raise RecordNotFound, "No #{klazz} record found for [#{search_terms}] on #{field}" unless(x)
81
+
82
+ x
83
+ end
84
+ end
85
+
86
+ end
@@ -12,11 +12,8 @@ module DataShift
12
12
 
13
13
  class CsvGenerator < GeneratorBase
14
14
 
15
- attr_accessor :excel, :filename
16
-
17
15
  def initialize(filename)
18
- @excel = nil
19
- @filename = filename
16
+ super(filename)
20
17
  end
21
18
 
22
19
  # Create CSV file representing supplied Model
@@ -8,20 +8,19 @@
8
8
  # TOD : Can we switch between .xls and XSSF (POI implementation of Excel 2007 OOXML (.xlsx) file format.)
9
9
  #
10
10
  #
11
+ require 'generator_base'
12
+ require 'excel'
13
+
11
14
  module DataShift
12
-
13
- require 'generator_base'
14
15
 
15
- require 'excel'
16
-
17
16
  class ExcelGenerator < GeneratorBase
18
17
 
19
18
  include DataShift::Logging
20
19
 
21
- attr_accessor :excel, :filename
20
+ attr_accessor :excel
22
21
 
23
22
  def initialize(filename)
24
- @filename = filename
23
+ super(filename)
25
24
  end
26
25
 
27
26
  # Create an Excel file template (header row) representing supplied Model
@@ -31,8 +30,14 @@ module DataShift
31
30
  def generate(klass, options = {})
32
31
 
33
32
  prepare_excel(klass, options)
34
-
35
- @excel.set_headers(MethodDictionary.assignments[klass])
33
+
34
+ prep_remove_list(options)
35
+
36
+ @headers = MethodDictionary.assignments[klass]
37
+
38
+ @headers.delete_if{|h| @remove_list.include?( h.to_sym ) }
39
+
40
+ @excel.set_headers( @headers )
36
41
 
37
42
  logger.info("ExcelGenerator saving generated template #{@filename}")
38
43
 
@@ -57,6 +62,9 @@ module DataShift
57
62
  #
58
63
  # * <tt>:remove</tt> - Association NAME(s) to remove .. :title, :id, :name
59
64
  # .
65
+ # * <tt>:remove_rails</tt> - Remove Rails DB columns :
66
+ # :id, :created_at, :created_on, :updated_at, :updated_on
67
+ #
60
68
  def generate_with_associations(klass, options = {})
61
69
 
62
70
  prepare_excel(klass, options)
@@ -65,9 +73,9 @@ module DataShift
65
73
 
66
74
  work_list = MethodDetail::supported_types_enum.to_a - [ *options[:exclude] ]
67
75
 
68
- remove_list = [ *options[:remove] ].compact.collect{|x| x.to_s.downcase.to_sym }
69
-
70
- headers = []
76
+ prep_remove_list(options)
77
+
78
+ @headers = []
71
79
 
72
80
  details_mgr = MethodDictionary.method_details_mgrs[klass]
73
81
 
@@ -94,6 +102,15 @@ module DataShift
94
102
 
95
103
  private
96
104
 
105
+ # Take options and create a list of symbols to remove from headers
106
+ #
107
+ def prep_remove_list( options )
108
+ @remove_list = [ *options[:remove] ].compact.collect{|x| x.to_s.downcase.to_sym }
109
+
110
+ @remove_list += GeneratorBase::rails_columns if(options[:remove_rails])
111
+ end
112
+
113
+
97
114
  def prepare_excel(klass, options = {})
98
115
  @filename = options[:filename] if options[:filename]
99
116
 
@@ -9,6 +9,18 @@ module DataShift
9
9
 
10
10
  class GeneratorBase
11
11
 
12
+ attr_accessor :filename, :headers, :remove_list
13
+
14
+ def initialize(filename)
15
+ @filename = filename
16
+ @headers = []
17
+ @remove_list =[]
18
+ end
19
+
20
+
21
+ def self.rails_columns
22
+ @rails_standard_columns ||= [:id, :created_at, :created_on, :updated_at, :updated_on]
23
+ end
12
24
  end
13
25
 
14
26
  end
@@ -16,8 +16,14 @@ module DataShift
16
16
 
17
17
  include DataShift::Logging
18
18
 
19
- # Options :
20
- # strict : Raise exception if any column cannot be mapped
19
+ # Assumes header_row is first row i.e row 0
20
+ #
21
+ # Options passed through to : populate_method_mapper_from_headers
22
+ #
23
+ # [:mandatory] : Array of mandatory column names
24
+ # [:force_inclusion] : Array of inbound column names to force into mapping
25
+ # [:include_all] : Include all headers in processing - takes precedence over :force_inclusion
26
+ # [:strict] : Raise exception when no mapping found for a column heading (non mandatory)
21
27
 
22
28
  def perform_csv_load(file_name, options = {})
23
29
 
@@ -30,7 +36,7 @@ module DataShift
30
36
 
31
37
  # Create a method_mapper which maps list of headers into suitable calls on the Active Record class
32
38
  # For example if model has an attribute 'price' will map columns called Price, price, PRICE etc to this attribute
33
- map_headers_to_operators( @parsed_file.shift, options)
39
+ populate_method_mapper_from_headers( @parsed_file.shift, options)
34
40
 
35
41
  puts "\n\n\nLoading from CSV file: #{file_name}"
36
42
  puts "Processing #{@parsed_file.size} rows"
@@ -24,8 +24,12 @@ module DataShift
24
24
  # Options:
25
25
  # [:sheet_number] : Default is 0. The index of the Excel Worksheet to use.
26
26
  # [:header_row] : Default is 0. Use alternative row as header definition.
27
+ #
28
+ # Options passed through to : populate_method_mapper_from_headers
29
+ #
27
30
  # [:mandatory] : Array of mandatory column names
28
31
  # [:force_inclusion] : Array of inbound column names to force into mapping
32
+ # [:include_all] : Include all headers in processing - takes precedence over :force_inclusion
29
33
  # [:strict] : Raise exception when no mapping found for a column heading (non mandatory)
30
34
 
31
35
  def perform_excel_load( file_name, options = {} )
@@ -66,7 +70,9 @@ module DataShift
66
70
 
67
71
  # Create a method_mapper which maps list of headers into suitable calls on the Active Record class
68
72
  # For example if model has an attribute 'price' will map columns called Price, price, PRICE etc to this attribute
69
- map_headers_to_operators( @headers, options )
73
+ populate_method_mapper_from_headers( @headers, options )
74
+
75
+ @method_mapper
70
76
 
71
77
  logger.info "Excel Loader processing #{@sheet.num_rows} rows"
72
78
 
@@ -97,11 +103,13 @@ module DataShift
97
103
  # as part of this we also attempt to save early, for example before assigning to
98
104
  # has_and_belongs_to associations which require the load_object has an id for the join table
99
105
 
100
- # Iterate over the columns method_mapper found in Excel,
101
- # pulling data out of associated column
102
- @method_mapper.method_details.each_with_index do |method_detail, col|
106
+ # Iterate over method_details, working on data out of associated Excel column
107
+ @method_mapper.method_details.each do |method_detail|
108
+
109
+
110
+ next unless method_detail # TODO populate unmapped with a real MethodDetail that is 'null' and create is_nil
103
111
 
104
- value = row[col]
112
+ value = row[method_detail.column_index]
105
113
 
106
114
  contains_data = true unless(value.nil? || value.to_s.empty?)
107
115
 
@@ -118,7 +126,7 @@ module DataShift
118
126
  unless(save)
119
127
  failure
120
128
  logger.error "Failed to save row [#{row}]"
121
- logger.error load_object.errors.inspect
129
+ logger.error load_object.errors.inspect if(load_object)
122
130
  else
123
131
  logger.info "Row #{row} succesfully SAVED : ID #{load_object.id}"
124
132
  end
@@ -13,11 +13,13 @@
13
13
  module DataShift
14
14
 
15
15
  require 'datashift/method_mapper'
16
+ require 'datashift/querying'
16
17
 
17
18
  class LoaderBase
18
19
 
19
20
  include DataShift::Logging
20
21
  include DataShift::Populator
22
+ include DataShift::Querying
21
23
 
22
24
  attr_reader :headers
23
25
 
@@ -32,61 +34,7 @@ module DataShift
32
34
 
33
35
  def options() return @config; end
34
36
 
35
- # Support multiple associations being added to a base object to be specified in a single column.
36
- #
37
- # Entry represents the association to find via supplied name, value to use in the lookup.
38
- # Can contain multiple lookup name/value pairs, separated by multi_assoc_delim ( | )
39
- #
40
- # Default syntax :
41
- #
42
- # Name1:value1, value2|Name2:value1, value2, value3|Name3:value1, value2
43
- #
44
- # E.G.
45
- # Association Properties, has a column named Size, and another called Colour,
46
- # and this combination could be used to lookup multiple associations to add to the main model Jumper
47
- #
48
- # Size:small # => generates find_by_size( 'small' )
49
- # Size:large # => generates find_by_size( 'large' )
50
- # Colour:red,green,blue # => generates find_all_by_colour( ['red','green','blue'] )
51
- #
52
- # Size:large|Size:medium|Size:large
53
- # => Find 3 different associations, perform lookup via column called Size
54
- # => Jumper.properties << [ small, medium, large ]
55
- #
56
- def self.name_value_delim
57
- @name_value_delim ||= ':'
58
- @name_value_delim
59
- end
60
-
61
- def self.set_name_value_delim(x) @name_value_delim = x; end
62
- # TODO - support embedded object creation/update via hash (which hopefully we should be able to just forward to AR)
63
- #
64
- # |Category|
65
- # name:new{ :date => '20110102', :owner = > 'blah'}
66
- #
67
-
68
-
69
- def self.multi_value_delim
70
- @multi_value_delim ||= ','
71
- @multi_value_delim
72
- end
73
-
74
- def self.set_multi_value_delim(x) @multi_value_delim = x; end
75
-
76
- # TODO - support multi embedded object creation/update via hash (which hopefully we should be able to just forward to AR)
77
- #
78
- # |Category|
79
- # name:new{ :a => 1, :b => 2}|name:medium{ :a => 6, :b => 34}|name:old{ :a => 12, :b => 67}
80
- #
81
- def self.multi_assoc_delim
82
- @multi_assoc_delim ||= '|'
83
- @multi_assoc_delim
84
- end
85
-
86
-
87
- def self.set_multi_assoc_delim(x) @multi_assoc_delim = x; end
88
37
 
89
-
90
38
  # Setup loading
91
39
  #
92
40
  # Options to drive building the method dictionary for a class, enabling headers to be mapped to operators on that class.
@@ -109,7 +57,7 @@ module DataShift
109
57
  # Create dictionary of data on all possible 'setter' methods which can be used to
110
58
  # populate or integrate an object of type @load_object_class
111
59
  DataShift::MethodDictionary.build_method_details(@load_object_class)
112
- end unless(options[:load] == false)
60
+ end if(options[:load] || options[:reload])
113
61
 
114
62
  @method_mapper = DataShift::MethodMapper.new
115
63
  @config = options.dup # clone can cause issues like 'can't modify frozen hash'
@@ -125,7 +73,9 @@ module DataShift
125
73
  @prefixes = {}
126
74
  @postfixes = {}
127
75
 
76
+ # TODO - move to own LoadStats or LoadReport class
128
77
  @loaded_objects = []
78
+ @failed_objects = []
129
79
 
130
80
  reset(object)
131
81
  end
@@ -163,30 +113,34 @@ module DataShift
163
113
 
164
114
 
165
115
 
166
- # Core API - Given a list of free text column names from a file,
167
- # map all headers to a method detail containing operator details.
116
+ # Core API
117
+ #
118
+ # Given a list of free text column names from a file,
119
+ # map all headers to a MethodDetail instance containing details on operator, look ups etc.
168
120
  #
169
- # This is then available through @method_mapper.method_details.each
121
+ # These are available through @method_mapper.method_details
170
122
  #
171
123
  # Options:
172
- # strict : Raise an exception of any headers can't be mapped to an attribute/association
173
- # ignore : List of column headers to ignore when building operator map
174
- # mandatory : List of columns that must be present in headers
124
+ # [:strict] : Raise an exception if any headers can't be mapped to an attribute/association
125
+ # [:ignore] : List of column headers to ignore when building operator map
126
+ # [:mandatory] : List of columns that must be present in headers
175
127
  #
176
- # force_inclusion : List of columns that do not map to any operator but should be includeed in processing.
177
- # This provides the opportunity for loaders to provide specific methods to handle these fields
178
- # when no direct operator is available on the modle or it's associations
128
+ # [:force_inclusion] : List of columns that do not map to any operator but should be included in processing.
129
+ #
130
+ # This provides the opportunity for loaders to provide specific methods to handle these fields
131
+ # when no direct operator is available on the model or its associations
179
132
  #
180
- def map_headers_to_operators( headers, options = {} )
133
+ # [:include_all] : Include all headers in processing - takes precedence over :force_inclusion
134
+ #
135
+ def populate_method_mapper_from_headers( headers, options = {} )
181
136
  @headers = headers
182
137
 
183
138
  mandatory = options[:mandatory] || []
184
-
185
-
139
+
186
140
  strict = (options[:strict] == true)
187
141
 
188
142
  begin
189
- @method_mapper.map_inbound_to_methods( load_object_class, @headers, options )
143
+ @method_mapper.map_inbound_headers_to_methods( load_object_class, @headers, options )
190
144
  rescue => e
191
145
  puts e.inspect, e.backtrace
192
146
  logger.error("Failed to map header row to set of database operators : #{e.inspect}")
@@ -194,7 +148,7 @@ module DataShift
194
148
  end
195
149
 
196
150
  unless(@method_mapper.missing_methods.empty?)
197
- puts "WARNING: These headings couldn't be mapped to class #{load_object_class} : #{@method_mapper.missing_methods.inspect}"
151
+ puts "WARNING: These headings couldn't be mapped to class #{load_object_class} :\n#{@method_mapper.missing_methods.inspect}"
198
152
  raise MappingDefinitionError, "Missing mappings for columns : #{@method_mapper.missing_methods.join(",")}" if(strict)
199
153
  end
200
154
 
@@ -202,6 +156,8 @@ module DataShift
202
156
  @method_mapper.missing_mandatory(mandatory).each { |e| puts "ERROR: Mandatory column missing - expected column '#{e}'" }
203
157
  raise MissingMandatoryError, "Mandatory columns missing - please fix and retry."
204
158
  end unless(mandatory.empty?)
159
+
160
+ @method_mapper
205
161
  end
206
162
 
207
163
 
@@ -219,59 +175,20 @@ module DataShift
219
175
  #
220
176
  # If suitable association found, process row data and then assign to current load_object
221
177
  def find_and_process(column_name, data)
178
+
179
+ puts "WARNING: MethodDictionary empty for class #{load_object_class}" unless(MethodDictionary.for?(load_object_class))
180
+
222
181
  method_detail = MethodDictionary.find_method_detail( load_object_class, column_name )
223
182
 
224
183
  if(method_detail)
225
184
  prepare_data(method_detail, data)
226
185
  process()
227
186
  else
228
- @load_object.errors.add_base( "No matching method found for column #{column_name}")
187
+ @load_object.errors.add(:base, "No matching method found for column #{column_name}")
229
188
  end
230
189
  end
231
190
 
232
191
 
233
- # Find a record for model klazz, looking up on field containing search_terms
234
- # Responds to global Options :
235
- # :case_sensitive : Default is a case insensitive lookup.
236
- # :use_like : Attempts a lookup using ike and x% ratehr than equality
237
-
238
- def get_record_by(klazz, field, search_terms, split_on = ' ', split_on_prefix = nil)
239
-
240
- begin
241
- record = if(@config[:case_sensitive])
242
- klazz.send("find_by_#{field}", search_terms)
243
- elsif(@config[:use_like])
244
- klazz.where("#{field} like ?", "#{search_terms}%").first
245
- else
246
- klazz.where("lower(#{field}) = ?", search_terms.downcase).first
247
- end
248
-
249
- # try the separate individual portions of the search_terms, front -> back
250
- search_terms.split(split_on).each do |x|
251
- z = "#{split_on_prefix}#{x}" if(split_on_prefix)
252
-
253
- record = get_record_by(klazz, field, z, split_on, split_on_prefix)
254
- break if record
255
- end unless(record)
256
-
257
- # this time try sequentially and incrementally scanning
258
- search_terms.split(split_on).inject("") do |str, term|
259
- z = (split_on_prefix) ? "#{split_on_prefix}#{str}#{term}": "#{str}#{term}"
260
- record = get_record_by(klazz, field, z, split_on, split_on_prefix)
261
- break if record
262
- term
263
- end unless(record)
264
-
265
- return record
266
-
267
- rescue => e
268
- logger.error("Exception attempting to find a record for [#{search_terms}] on #{klazz}.#{field}")
269
- logger.error e.backtrace
270
- logger.error e.inspect
271
- return nil
272
- end
273
- end
274
-
275
192
  # Default values and over rides can be provided in YAML config file.
276
193
  #
277
194
  # Any Config under key 'LoaderBase' is merged over existing options - taking precedence.
@@ -377,7 +294,7 @@ module DataShift
377
294
  #
378
295
  def get_find_operator_and_rest(inbound_data)
379
296
 
380
- operator, rest = inbound_data.split(LoaderBase::name_value_delim)
297
+ operator, rest = inbound_data.split(Delimiters::name_value_delim)
381
298
 
382
299
  #puts "DEBUG inbound_data: #{inbound_data} => #{operator} , #{rest}"
383
300
 
@@ -415,23 +332,17 @@ module DataShift
415
332
 
416
333
  # A single column can contain multiple associations delimited by special char
417
334
  # Size:large|Colour:red,green,blue => ['Size:large', 'Colour:red,green,blue']
418
- columns = @current_value.to_s.split( LoaderBase::multi_assoc_delim)
335
+ columns = @current_value.to_s.split( Delimiters::multi_assoc_delim)
419
336
 
420
337
  # Size:large|Colour:red,green,blue => generates find_by_size( 'large' ) and find_all_by_colour( ['red','green','blue'] )
421
338
 
422
339
  columns.each do |col_str|
423
340
 
424
341
  find_operator, col_values = get_find_operator_and_rest( col_str )
425
-
426
- #if(@current_method_detail.find_by_operator)
427
- # find_operator, col_values = @current_method_detail.find_by_operator, col_str
428
- # else
429
- # find_operator, col_values = col_str.split(LoaderBase::name_value_delim)
430
- # end
431
-
342
+
432
343
  raise "Cannot perform DB find by #{find_operator}. Expected format key:value" unless(find_operator && col_values)
433
344
 
434
- find_by_values = col_values.split(LoaderBase::multi_value_delim)
345
+ find_by_values = col_values.split(Delimiters::multi_value_delim)
435
346
 
436
347
  find_by_values << @current_method_detail.find_by_value if(@current_method_detail.find_by_value)
437
348
 
@@ -471,11 +382,13 @@ module DataShift
471
382
  end
472
383
 
473
384
  def failure
474
- @failed_objects << @load_object unless( !load_object.new_record? || @failed_objects.include?(@load_object))
385
+ @failed_objects << @load_object unless( @load_object.nil? || @load_object.new_record? || @failed_objects.include?(@load_object))
475
386
  end
476
387
 
477
388
  def save
478
- puts "DEBUG: SAVING #{load_object.class} : #{load_object.inspect}" if(@verbose)
389
+ return unless( @load_object )
390
+
391
+ puts "DEBUG: SAVING #{@load_object.class} : #{@load_object.inspect}" if(@verbose)
479
392
  begin
480
393
  result = @load_object.save
481
394
 
@@ -582,7 +495,7 @@ module DataShift
582
495
  # Supported Syntax :
583
496
  # assoc_find_name:value | assoc2_find_name:value | etc
584
497
  def get_each_assoc
585
- current_value.to_s.split( LoaderBase::multi_assoc_delim )
498
+ current_value.to_s.split( Delimiters::multi_assoc_delim )
586
499
  end
587
500
 
588
501
  private