datashift 0.5.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. data/Rakefile +3 -0
  2. data/VERSION +1 -1
  3. data/datashift.gemspec +6 -4
  4. data/lib/datashift/method_detail.rb +2 -2
  5. data/lib/datashift/method_dictionary.rb +15 -2
  6. data/lib/datashift/method_mapper.rb +18 -5
  7. data/lib/exporters/excel_exporter.rb +1 -2
  8. data/lib/helpers/spree_helper.rb +18 -12
  9. data/lib/loaders/csv_loader.rb +1 -8
  10. data/lib/loaders/excel_loader.rb +7 -7
  11. data/lib/loaders/loader_base.rb +34 -6
  12. data/lib/loaders/spreadsheet_loader.rb +86 -78
  13. data/lib/loaders/spree/image_loader.rb +72 -24
  14. data/lib/loaders/spree/product_loader.rb +3 -31
  15. data/lib/thor/import_excel.thor +15 -16
  16. data/lib/thor/spree/products_images.thor +5 -3
  17. data/sandbox/config/application.rb +21 -5
  18. data/sandbox/config/database.yml +18 -32
  19. data/sandbox/config/environment.rb +1 -3
  20. data/sandbox/config/environments/development.rb +9 -2
  21. data/spec/Gemfile +16 -5
  22. data/spec/fixtures/datashift_Spree_db.sqlite +0 -0
  23. data/spec/fixtures/datashift_test_models_db.sqlite +0 -0
  24. data/spec/fixtures/spree/{SpreeProductImages.xls → SpreeImages.xls} +0 -0
  25. data/spec/fixtures/spree/SpreeProductsWithImages.csv +3 -3
  26. data/spec/fixtures/spree/SpreeProductsWithImages.xls +0 -0
  27. data/spec/fixtures/test_model_defs.rb +1 -0
  28. data/spec/spec_helper.rb +13 -11
  29. data/spec/spree_exporter_spec.rb +6 -4
  30. data/spec/spree_generator_spec.rb +14 -10
  31. data/spec/spree_images_loader_spec.rb +59 -24
  32. data/spec/spree_loader_spec.rb +12 -10
  33. data/spec/spree_method_mapping_spec.rb +16 -9
  34. data/tasks/spree/image_load.rake +2 -8
  35. metadata +15 -6
data/Rakefile CHANGED
@@ -38,6 +38,9 @@ Jeweler::Tasks.new do |gem|
  gem.email = "rubygems@autotelik.co.uk"
  gem.authors = ["Thomas Statter"]
  # dependencies defined in Gemfile
+ gem.files.exclude ['sandbox']
+
+ gem.add_dependency ['spreadsheet']
  end
  Jeweler::RubygemsDotOrgTasks.new

data/VERSION CHANGED
@@ -1 +1 @@
- 0.5.0
+ 0.6.0
data/datashift.gemspec CHANGED
@@ -5,11 +5,11 @@

  Gem::Specification.new do |s|
  s.name = "datashift"
- s.version = "0.5.0"
+ s.version = "0.6.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Thomas Statter"]
- s.date = "2012-05-08"
+ s.date = "2012-05-18"
  s.description = "A suite of tools to move data between ActiveRecord models,databases,applications like Excel/Open Office, files and projects including Spree"
  s.email = "rubygems@autotelik.co.uk"
  s.extra_rdoc_files = [
@@ -24,7 +24,6 @@ Gem::Specification.new do |s|
  "README.rdoc",
  "Rakefile",
  "VERSION",
- "datashift-0.5.0.gem",
  "datashift.gemspec",
  "lib/applications/jruby/jexcel_file.rb",
  "lib/applications/jruby/word.rb",
@@ -122,7 +121,7 @@ Gem::Specification.new do |s|
  "spec/fixtures/negative/SpreeProdMissManyMandatory.xls",
  "spec/fixtures/simple_export_spec.xls",
  "spec/fixtures/simple_template_spec.xls",
- "spec/fixtures/spree/SpreeProductImages.xls",
+ "spec/fixtures/spree/SpreeImages.xls",
  "spec/fixtures/spree/SpreeProducts.csv",
  "spec/fixtures/spree/SpreeProducts.xls",
  "spec/fixtures/spree/SpreeProductsDefaults.yml",
@@ -167,9 +166,12 @@ Gem::Specification.new do |s|
  s.specification_version = 3

  if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
+ s.add_runtime_dependency(%q<spreadsheet>, [">= 0"])
  else
+ s.add_dependency(%q<spreadsheet>, [">= 0"])
  end
  else
+ s.add_dependency(%q<spreadsheet>, [">= 0"])
  end
  end

data/lib/datashift/method_detail.rb CHANGED
@@ -72,8 +72,8 @@ module DataShift
  nil
  end

- def operator?(name)
- operator == name
+ def operator?(name, case_sensitive = false)
+ case_sensitive ? operator == name : operator.downcase == name.downcase
  end

  # Return the operator's expected class name, if can be derived, else nil
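
Editor's note: the operator? change above makes name comparison case-insensitive by default, with an optional flag for strict matching. A minimal usage sketch, assuming the dictionary for an illustrative Product model with a 'price' attribute has already been populated; the lookup call in the comment is an assumption, not part of this diff:

    require 'datashift'

    # Assumes Product's operators have already been collected into the dictionary.
    md = DataShift::MethodDictionary.find_method_detail(Product, 'price')

    md.operator?('PRICE')        # => true  - 0.6.0 compares case-insensitively by default
    md.operator?('PRICE', true)  # => false - pass true for the old case-sensitive behaviour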
data/lib/datashift/method_dictionary.rb CHANGED
@@ -59,6 +59,7 @@ module DataShift
  if(options[:instance_methods] == true)
  setters = klass.instance_methods.grep(/\w+=/).collect {|x| x.to_s }

+ # TODO - Since 3.2 this seems to return lots more stuff including validations which might not be appropriate
  if(klass.respond_to? :defined_activerecord_methods)
  setters = setters - klass.defined_activerecord_methods.to_a
  end
@@ -81,6 +82,13 @@ module DataShift
  end
  end

+
+ def self.add( klass, operator, type = :assignment)
+ method_details_mgr = get_method_details_mgr( klass )
+ md = MethodDetail.new(operator, klass, operator, type)
+ method_details_mgr << md
+ return md
+ end

  # Build a thorough and usable picture of the operators by building dictionary of our MethodDetail
  # objects which can be used to import/export data to objects of type 'klass'
@@ -115,7 +123,7 @@ module DataShift
  #
  def self.find_method_detail( klass, external_name )

- md_mgr = method_details_mgrs[klass] || MethodDetailsManager.new( klass )
+ method_details_mgr = get_method_details_mgr( klass )

  # md_mgr.all_available_operators.each { |l| puts "DEBUG: Mapped Method : #{l.inspect}" }

@@ -136,7 +144,7 @@ module DataShift
  # Try each association type, returning first that contains matching operator with name n

  MethodDetail::supported_types_enum.each do |t|
- method_detail = md_mgr.find(n, t)
+ method_detail = method_details_mgr.find(n, t)
  return method_detail if(method_detail)
  end

@@ -158,6 +166,11 @@ module DataShift
  "#{klass.name}:#{column}"
  end

+ def self.get_method_details_mgr( klass )
+ method_details_mgrs[klass] || MethodDetailsManager.new( klass )
+ end
+
+
  def self.method_details_mgrs
  @method_details_mgrs ||= {}
  @method_details_mgrs
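
Editor's note: the new MethodDictionary.add hook registers an operator that reflection would not discover, so a later lookup can resolve it. A rough sketch, assuming the class's dictionary has already been built and using a made-up column name:

    require 'datashift'

    # 'image_path' is an illustrative inbound column with no matching
    # attribute or association on Product.
    md = DataShift::MethodDictionary.add(Product, 'image_path')   # type defaults to :assignment

    # Subsequent lookups can then return the manually registered detail.
    DataShift::MethodDictionary.find_method_detail(Product, 'image_path')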
data/lib/datashift/method_mapper.rb CHANGED
@@ -50,7 +50,10 @@ module DataShift
  # specified may not be exactly as required e.g handles capitalisation, white space, _ etc
  # Returns: Array of matching method_details
  #
- def map_inbound_to_methods( klass, columns )
+ def map_inbound_to_methods( klass, columns, options = {} )
+
+ forced = [*options[:force_inclusion]].compact
+ forced.collect! { |f| f.downcase }

  @method_details, @missing_methods = [], []

@@ -60,16 +63,26 @@ module DataShift
  next
  end

- x, lookup = name.split(MethodMapper::column_delim)
+ operator, lookup = name.split(MethodMapper::column_delim)
  #puts "DEBUG: Find Method Detail for #{x}"
- md = MethodDictionary::find_method_detail( klass, x )
+ md = MethodDictionary::find_method_detail( klass, operator )

  # TODO be nice if we could cheeck that the assoc on klass responds to the specified
  # lookup key now (nice n early)
  # active_record_helper = "find_by_#{lookup}"
+ if(md.nil? && forced.include?(operator.downcase))
+ md = MethodDictionary::add(klass, operator)
+ end
+
+ if(md)
+
+ md.find_by_operator = lookup if(lookup) # TODO and klass.x.respond_to?(active_record_helper))
+
+ @method_details << md
+ else
+ @missing_methods << operator
+ end

- md.find_by_operator = lookup if(lookup) # TODO and klass.x.respond_to?(active_record_helper))
- md ? @method_details << md : @missing_methods << x
  end
  #@method_details.compact! .. currently we may need to map via the index on @method_details so don't remove nils for now
  @method_details
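
Editor's note: map_inbound_to_methods now takes an options hash; :force_inclusion lists inbound headers with no matching operator that should still receive a placeholder MethodDetail (via MethodDictionary.add) instead of landing in missing_methods. A sketch with illustrative column names, assuming MethodMapper.new needs no arguments:

    require 'datashift'

    mapper  = DataShift::MethodMapper.new
    columns = ['sku', 'name', 'price', 'image']

    # Without :force_inclusion the unmatched 'image' column would be reported
    # in mapper.missing_methods; with it a placeholder operator is registered.
    details = mapper.map_inbound_to_methods(Product, columns, :force_inclusion => ['image'])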
data/lib/exporters/excel_exporter.rb CHANGED
@@ -104,10 +104,9 @@ module DataShift
  end # ExcelGenerator

  else
- class ExcelExporter < ExcelBase
+ class ExcelExporter < ExporterBase

  def initialize(filename)
- @filename = filename
  raise DataShift::BadRuby, "Apologies but DataShift Excel facilities currently need JRuby. Please switch to, or install JRuby"
  end
  end
data/lib/helpers/spree_helper.rb CHANGED
@@ -80,9 +80,7 @@ module DataShift
  # => Will chdir into the sandbox to load environment as need to mimic being at root of a rails project
  # chdir back after environment loaded

- # gem('rails', '3.2.3')
-
- def self.boot( database_env)#, rails_version = '3.1.3' )
+ def self.boot( database_env)

  if( ! is_namespace_version )
  db_connect( database_env )
@@ -90,11 +88,8 @@ module DataShift
  boot_pre_1
  @dslog.info "Booted Spree using pre 1.0.0 version"
  else
-
-
- #em('rails', rails_version)
-
- db_connect( database_env )#, rails_version )
+
+ db_connect( database_env )

  @dslog.info "Booting Spree using version #{SpreeHelper::version}"

@@ -102,13 +97,24 @@ module DataShift

  store_path = Dir.pwd

- Dir.chdir( File.expand_path('../../../sandbox', __FILE__) )
-
- rails_root = File.expand_path('../../../sandbox', __FILE__)
+ spree_sanbox_app = File.expand_path('../../../sandbox', __FILE__)
+
+ unless(File.exists?(spree_sanbox_app))
+ Dir.chdir( File.expand_path( "#{spree_sanbox_app}/..") )
+ system('rails new sandbox')
+ end
+
+ rails_root = spree_sanbox_app

  $:.unshift rails_root

- require 'config/environment.rb'
+ begin
+ require 'config/environment.rb'
+ rescue => e
+ #somethign in deface seems to blow up suddenly on 1.1
+ # puts e.backtrace
+ puts "Warning - Potential issue initializing Spree sanbox #{e.inspect}"
+ end

  Dir.chdir( store_path )

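Editor's note: with the change above, boot generates the sandbox app ('rails new sandbox') when it is missing and rescues failures while loading its config/environment.rb. A usage sketch; the DataShift::SpreeHelper constant path and require path are assumed from the file layout, and 'test' stands for whichever environment exists in sandbox/config/database.yml:

    require 'datashift'
    require 'helpers/spree_helper'

    # 'test' is an illustrative environment name from sandbox/config/database.yml.
    DataShift::SpreeHelper.boot('test')

    puts "Booted Spree #{DataShift::SpreeHelper.version}"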
data/lib/loaders/csv_loader.rb CHANGED
@@ -28,16 +28,9 @@ module DataShift

  @parsed_file = CSV.read(file_name)

- @mandatory = options[:mandatory] || []
-
  # Create a method_mapper which maps list of headers into suitable calls on the Active Record class
  # For example if model has an attribute 'price' will map columns called Price, price, PRICE etc to this attribute
- map_headers_to_operators( @parsed_file.shift, options[:strict] , @mandatory )
-
- unless(@method_mapper.missing_methods.empty?)
- logger.error("Following column headings could not be mapped :\n#{@method_mapper.missing_methods.inspect}")
- raise MappingDefinitionError, "ERROR: Missing mappings for #{@method_mapper.missing_methods.size} column headings"
- end
+ map_headers_to_operators( @parsed_file.shift, options)

  #if(options[:verbose])
  puts "\n\n\nLoading from CSV file: #{file_name}"
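
Editor's note: the CSV loader now forwards the whole options hash to map_headers_to_operators rather than pulling out :strict and :mandatory itself. A sketch of a call site; the CsvLoader class name and constructor argument are assumed from the file name and the other loaders in this diff, and the column names are illustrative:

    require 'datashift'

    loader = DataShift::CsvLoader.new(Product)   # class name assumed from csv_loader.rb

    loader.perform_load('SpreeProducts.csv',
                        :mandatory       => ['sku', 'name', 'price'],
                        :force_inclusion => ['image'])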
data/lib/loaders/excel_loader.rb CHANGED
@@ -25,14 +25,14 @@ module DataShift
  module ExcelLoading

  # Options:
- # [:header_row] : Default is 0. Use alternative row as header definition.
- # [:mandatory] : Array of mandatory column names
- # [:strict] : Raise exception when no mapping found for a column heading (non mandatory)
- # [:sheet_number]
+ # [:sheet_number] : Default is 0. The index of the Excel Worksheet to use.
+ # [:header_row] : Default is 0. Use alternative row as header definition.
+ # [:mandatory] : Array of mandatory column names
+ # [:force_inclusion] : Array of inbound column names to force into mapping
+ # [:strict] : Raise exception when no mapping found for a column heading (non mandatory)

- def perform_excel_load( file_name, options = {} )

- @mandatory = options[:mandatory] || []
+ def perform_excel_load( file_name, options = {} )

  @excel = JExcelFile.new

@@ -64,7 +64,7 @@ module DataShift

  # Create a method_mapper which maps list of headers into suitable calls on the Active Record class
  # For example if model has an attribute 'price' will map columns called Price, price, PRICE etc to this attribute
- map_headers_to_operators( @headers, options[:strict] , @mandatory )
+ map_headers_to_operators( @headers, options )

  logger.info "Excel Loader prcoessing #{@excel.num_rows} rows"
  load_object_class.transaction do
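
Editor's note: the updated comment block documents the full set of option keys now honoured by perform_excel_load on the JRuby/JExcelFile path. A sketch of passing them through the public perform_load; the ExcelLoader class name and its delegation to perform_excel_load are assumptions based on the file name and on how SpreadsheetLoader delegates later in this diff:

    require 'datashift'

    loader = DataShift::ExcelLoader.new(Product)   # class name assumed from excel_loader.rb

    loader.perform_load('SpreeProducts.xls',
                        :sheet_number    => 0,           # worksheet index
                        :header_row      => 0,           # row holding the column headings
                        :mandatory       => ['sku'],     # headings that must be present
                        :force_inclusion => ['image'],   # headings with no direct operator
                        :strict          => false)       # don't raise on unmapped headings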
data/lib/loaders/loader_base.rb CHANGED
@@ -120,8 +120,15 @@ module DataShift
  # CSV files
  #
  # OPTIONS :
- # strict : Raise exception if any column cannot be mapped
-
+ #
+ # strict : Raise an exception of any headers can't be mapped to an attribute/association
+ # ignore : List of column headers to ignore when building operator map
+ # mandatory : List of columns that must be present in headers
+ #
+ # force_inclusion : List of columns that do not map to any operator but should be includeed in processing.
+ # This provides the opportunity for loaders to provide specific methods to handle these fields
+ # when no direct operator is available on the modle or it's associations
+ #
  def perform_load( file_name, options = {} )

  raise DataShift::BadFile, "Cannot load #{file_name} file not found." unless(File.exists?(file_name))
@@ -139,21 +146,33 @@ module DataShift
  end


+
  # Core API - Given a list of free text column names from a file,
  # map all headers to a method detail containing operator details.
  #
  # This is then available through @method_mapper.method_details.each
  #
  # Options:
- # strict : Raise an exception of any headers can't be mapped to an attribute/association
+ # strict : Raise an exception of any headers can't be mapped to an attribute/association
+ # ignore : List of column headers to ignore when building operator map
+ # mandatory : List of columns that must be present in headers
+ #
+ # force_inclusion : List of columns that do not map to any operator but should be includeed in processing.
+ # This provides the opportunity for loaders to provide specific methods to handle these fields
+ # when no direct operator is available on the modle or it's associations
  #
- def map_headers_to_operators( headers, strict, mandatory = [])
+ def map_headers_to_operators( headers, options = {} )
  @headers = headers

+ mandatory = options[:mandatory] || []
+
+
+ strict = (options[:strict] == true)
+
  begin
- method_details = @method_mapper.map_inbound_to_methods( load_object_class, @headers )
+ @method_mapper.map_inbound_to_methods( load_object_class, @headers, options )
  rescue => e
- puts e.inspect
+ puts e.inspect, e.backtrace
  logger.error("Failed to map header row to set of database operators : #{e.inspect}")
  raise MappingDefinitionError, "Failed to map header row to set of database operators"
  end
@@ -443,6 +462,15 @@ module DataShift
  end
  end

+ protected
+
+ # Take current column data and split into each association
+ # Supported Syntax :
+ # assoc_find_name:value | assoc2_find_name:value | etc
+ def get_each_assoc
+ current_value.to_s.split( LoaderBase::multi_assoc_delim )
+ end
+

  private
  def save_if_new
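
Editor's note: the new protected helper get_each_assoc splits the current column value on LoaderBase::multi_assoc_delim so a loader can process several association lookups from one cell. A standalone illustration of the expected syntax, assuming '|' between associations and ':' between find-by name and value, as in the comment; the lookup names are made up:

    # Pure-Ruby illustration of the per-column association syntax.
    column_value = 'taxon_name:Ruby | taxon_name:Rails'

    column_value.split('|').map(&:strip).each do |assoc|
      find_by, value = assoc.split(':')
      puts "lookup #{find_by} => #{value}"   # => lookup taxon_name => Ruby, then Rails
    end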
data/lib/loaders/spreadsheet_loader.rb CHANGED
@@ -12,126 +12,134 @@
  # i.e pulls data from each column and sends to object.
  #
  require 'datashift/exceptions'
-
+
  module DataShift
+
+ unless(Guards::jruby?)
+
+ require 'loaders/loader_base'

- require 'loaders/loader_base'
+ module SpreadsheetLoading

- module SpreadsheetLoading
+ gem 'spreadsheet'
+ require 'spreadsheet'

- # Options:
- # [:header_row] : Default is 0. Use alternative row as header definition.
- # [:mandatory] : Array of mandatory column names
- # [:strict] : Raise exception when no mapping found for a column heading (non mandatory)
- # [:sheet_number]

- def perform_spreadsheet_load( file_name, options = {} )
+ # Spreadsheet.client_encoding = 'UTF-8'F
+
+ # Options:
+ # [:header_row] : Default is 0. Use alternative row as header definition.
+ # [:mandatory] : Array of mandatory column names
+ # [:strict] : Raise exception when no mapping found for a column heading (non mandatory)
+ # [:sheet_number]

- @mandatory = options[:mandatory] || []
+ def perform_spreadsheet_load( file_name, options = {} )

- @excel = JExcelFile.new
+ @mandatory = options[:mandatory] || []

- @excel.open(file_name)
+ @excel = Spreadsheet.open file_name

- #if(options[:verbose])
- puts "\n\n\nLoading from Excel file: #{file_name}"
+ #if(options[:verbose])
+ puts "\n\n\nLoading from Excel file: #{file_name}"

- sheet_number = options[:sheet_number] || 0
+ sheet_number = options[:sheet_number] || 0

- @sheet = @excel.sheet( sheet_number )
+ @sheet = @excel.sheet( sheet_number )

- header_row_index = options[:header_row] || 0
- @header_row = @sheet.getRow(header_row_index)
+ header_row_index = options[:header_row] || 0
+ @header_row = @sheet.getRow(header_row_index)

- raise MissingHeadersError, "No headers found - Check Sheet #{@sheet} is complete and Row #{header_row_index} contains headers" unless(@header_row)
+ raise MissingHeadersError, "No headers found - Check Sheet #{@sheet} is complete and Row #{header_row_index} contains headers" unless(@header_row)

- @headers = []
+ @headers = []

- (0..JExcelFile::MAX_COLUMNS).each do |i|
- cell = @header_row.getCell(i)
- break unless cell
- header = "#{@excel.cell_value(cell).to_s}".strip
- break if header.empty?
- @headers << header
- end
+ (0..JExcelFile::MAX_COLUMNS).each do |i|
+ cell = @header_row.getCell(i)
+ break unless cell
+ header = "#{@excel.cell_value(cell).to_s}".strip
+ break if header.empty?
+ @headers << header
+ end

- raise MissingHeadersError, "No headers found - Check Sheet #{@sheet} is complete and Row #{header_row_index} contains headers" if(@headers.empty?)
+ raise MissingHeadersError, "No headers found - Check Sheet #{@sheet} is complete and Row #{header_row_index} contains headers" if(@headers.empty?)

- # Create a method_mapper which maps list of headers into suitable calls on the Active Record class
- map_headers_to_operators( @headers, options[:strict] , @mandatory )
+ # Create a method_mapper which maps list of headers into suitable calls on the Active Record class
+ map_headers_to_operators( @headers, options)

- load_object_class.transaction do
- @loaded_objects = []
+ load_object_class.transaction do
+ @loaded_objects = []

- (1..@excel.num_rows).collect do |row|
+ (1..@excel.num_rows).collect do |row|

- # Excel num_rows seems to return all 'visible' rows, which appears to be greater than the actual data rows
- # (TODO - write spec to process .xls with a huge number of rows)
- #
- # This is rubbish but currently manually detect when actual data ends, this isn't very smart but
- # got no better idea than ending once we hit the first completely empty row
- break if @excel.sheet.getRow(row).nil?
+ # Excel num_rows seems to return all 'visible' rows, which appears to be greater than the actual data rows
+ # (TODO - write spec to process .xls with a huge number of rows)
+ #
+ # This is rubbish but currently manually detect when actual data ends, this isn't very smart but
+ # got no better idea than ending once we hit the first completely empty row
+ break if @excel.sheet.getRow(row).nil?

- contains_data = false
+ contains_data = false

- # TODO - Smart sorting of column processing order ....
- # Does not currently ensure mandatory columns (for valid?) processed first but model needs saving
- # before associations can be processed so user should ensure mandatory columns are prior to associations
+ # TODO - Smart sorting of column processing order ....
+ # Does not currently ensure mandatory columns (for valid?) processed first but model needs saving
+ # before associations can be processed so user should ensure mandatory columns are prior to associations

- # as part of this we also attempt to save early, for example before assigning to
- # has_and_belongs_to associations which require the load_object has an id for the join table
+ # as part of this we also attempt to save early, for example before assigning to
+ # has_and_belongs_to associations which require the load_object has an id for the join table

- # Iterate over the columns method_mapper found in Excel,
- # pulling data out of associated column
- @method_mapper.method_details.each_with_index do |method_detail, col|
+ # Iterate over the columns method_mapper found in Excel,
+ # pulling data out of associated column
+ @method_mapper.method_details.each_with_index do |method_detail, col|

- value = value_at(row, col)
+ value = value_at(row, col)

- contains_data = true unless(value.nil? || value.to_s.empty?)
+ contains_data = true unless(value.nil? || value.to_s.empty?)

- #puts "DEBUG: Excel process METHOD :#{method_detail.inspect}", value.inspect
- prepare_data(method_detail, value)
+ #puts "DEBUG: Excel process METHOD :#{method_detail.inspect}", value.inspect
+ prepare_data(method_detail, value)

- process()
- end
+ process()
+ end

- break unless(contains_data == true)
+ break unless(contains_data == true)

- # TODO - requirements to handle not valid ?
- # all or nothing or carry on and dump out the exception list at end
- #puts "DEBUG: FINAL SAVE #{load_object.inspect}"
- save
- #puts "DEBUG: SAVED #{load_object.inspect}"
+ # TODO - requirements to handle not valid ?
+ # all or nothing or carry on and dump out the exception list at end
+ #puts "DEBUG: FINAL SAVE #{load_object.inspect}"
+ save
+ #puts "DEBUG: SAVED #{load_object.inspect}"

- # don't forget to reset the object or we'll update rather than create
- new_load_object
+ # don't forget to reset the object or we'll update rather than create
+ new_load_object

+ end
  end
+ puts "Spreadsheet loading stage complete - #{loaded_objects.size} rows added."
  end
- puts "Spreadsheet loading stage complete - #{loaded_objects.size} rows added."
- end

- def value_at(row, column)
- @excel.get_cell_value( @excel.sheet.getRow(row), column)
+ def value_at(row, column)
+ @excel.get_cell_value( @excel.sheet.getRow(row), column)
+ end
  end
- end


- class SpreadsheetLoader < LoaderBase
+ class SpreadsheetLoader < LoaderBase

- include SpreadsheetLoading
+ include SpreadsheetLoading

- def initialize(klass, object = nil, options = {})
- super( klass, object, options )
- raise "Cannot load - failed to create a #{klass}" unless @load_object
- end
+ def initialize(klass, object = nil, options = {})
+ super( klass, object, options )
+ raise "Cannot load - failed to create a #{klass}" unless @load_object
+ end

- def perform_load( file_name, options = {} )
- perform_spreadsheet_load( file_name, options )
+ def perform_load( file_name, options = {} )
+ perform_spreadsheet_load( file_name, options )

- puts "Spreadsheet loading stage complete - #{loaded_objects.size} rows added."
- end
+ puts "Spreadsheet loading stage complete - #{loaded_objects.size} rows added."
- end
+ end

- end
+ end

+ end
+
  end
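
Editor's note: the diff above wraps SpreadsheetLoading in unless(Guards::jruby?), so on plain MRI the loader is backed by the new 'spreadsheet' gem dependency rather than JExcelFile. A usage sketch with an illustrative model and fixture name; the constructor and perform_load signatures are taken from the diff:

    require 'datashift'

    # Only available when not running under JRuby (Guards::jruby? is false).
    loader = DataShift::SpreadsheetLoader.new(Product)

    loader.perform_load('SpreeProducts.xls', :mandatory => ['sku', 'name', 'price'])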