datashift 0.40.3 → 0.40.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46) hide show
  1. checksums.yaml +5 -5
  2. data/README.md +7 -2
  3. data/datashift.thor +28 -23
  4. data/lib/datashift.rb +6 -6
  5. data/lib/datashift/binder.rb +30 -11
  6. data/lib/datashift/configuration.rb +10 -2
  7. data/lib/datashift/core_ext/array.rb +7 -7
  8. data/lib/datashift/delimiters.rb +1 -1
  9. data/lib/datashift/doc_context.rb +1 -1
  10. data/lib/datashift/excel_base.rb +2 -2
  11. data/lib/datashift/exporters/csv_exporter.rb +0 -1
  12. data/lib/datashift/exporters/excel_exporter.rb +3 -4
  13. data/lib/datashift/file_definitions.rb +1 -3
  14. data/lib/datashift/inbound_data/method_binding.rb +5 -5
  15. data/lib/datashift/loaders/csv_loader.rb +2 -3
  16. data/lib/datashift/loaders/excel_loader.rb +8 -4
  17. data/lib/datashift/loaders/failure_data.rb +1 -3
  18. data/lib/datashift/loaders/loader_base.rb +2 -8
  19. data/lib/datashift/loaders/loader_factory.rb +6 -0
  20. data/lib/datashift/loaders/paperclip/attachment_loader.rb +1 -1
  21. data/lib/datashift/loaders/paperclip/datashift_paperclip.rb +2 -2
  22. data/lib/datashift/loaders/paperclip/image_loading.rb +2 -2
  23. data/lib/datashift/mapping/data_flow_schema.rb +40 -14
  24. data/lib/datashift/mapping/mapper_utils.rb +3 -3
  25. data/lib/datashift/model_methods/catalogue.rb +14 -14
  26. data/lib/datashift/model_methods/model_method.rb +5 -6
  27. data/lib/datashift/model_methods/operator.rb +1 -1
  28. data/lib/datashift/node_context.rb +2 -3
  29. data/lib/datashift/populators/has_many.rb +2 -2
  30. data/lib/datashift/populators/insistent_assignment.rb +4 -4
  31. data/lib/datashift/populators/populator.rb +21 -16
  32. data/lib/datashift/populators/populator_factory.rb +2 -4
  33. data/lib/datashift/querying.rb +4 -5
  34. data/lib/datashift/transformation/factory.rb +3 -3
  35. data/lib/datashift/version.rb +1 -1
  36. data/lib/generators/datashift/install_generator.rb +3 -3
  37. data/lib/generators/templates/datashift.rb +0 -4
  38. data/lib/tasks/config.thor +7 -9
  39. data/lib/tasks/export.thor +44 -45
  40. data/lib/tasks/generate.thor +43 -37
  41. data/lib/tasks/import.thor +20 -14
  42. data/lib/tasks/paperclip.thor +46 -48
  43. data/lib/tasks/thor_behaviour.rb +1 -1
  44. data/lib/tasks/to_convert_to_thor/db_tasks.rake +1 -3
  45. data/lib/tasks/tools.thor +37 -38
  46. metadata +45 -45
@@ -23,13 +23,13 @@ module DataShift
23
23
  extend DataShift::Delimiters
24
24
 
25
25
  def self.insistent_method_list
26
- @insistent_method_list ||= [:to_s, :downcase, :to_i, :to_f, :to_b]
26
+ @insistent_method_list ||= %i[to_s downcase to_i to_f to_b]
27
27
  end
28
28
 
29
29
  # When looking up an association, when no field provided, try each of these in turn till a match
30
30
  # i.e find_by_name, find_by_title, find_by_id
31
31
  def self.insistent_find_by_list
32
- @insistent_find_by_list ||= [:name, :title, :id]
32
+ @insistent_find_by_list ||= %i[name title id]
33
33
  end
34
34
 
35
35
  attr_reader :value, :attribute_hash
@@ -99,7 +99,7 @@ module DataShift
99
99
  @value = data
100
100
 
101
101
  elsif(!DataShift::Guards.jruby? &&
102
- (data.is_a?(Spreadsheet::Formula) || data.class.ancestors.include?(Spreadsheet::Formula)) )
102
+ (data.is_a?(Spreadsheet::Formula) || data.class.ancestors.include?(Spreadsheet::Formula)) )
103
103
 
104
104
  @value = data.value # TOFIX jruby/apache poi equivalent ?
105
105
 
@@ -129,8 +129,7 @@ module DataShift
129
129
  end
130
130
 
131
131
  run_transforms(method_binding)
132
-
133
- rescue => e
132
+ rescue StandardError => e
134
133
  logger.error(e.message)
135
134
  logger.error("Populator stacktrace: #{e.backtrace.first}")
136
135
  raise DataProcessingError, "Populator failed to prepare data [#{value}] for #{method_binding.pp}"
@@ -156,11 +155,17 @@ module DataShift
156
155
 
157
156
  elsif model_method.operator_for(:has_one)
158
157
 
159
- if value.is_a?(model_method.klass)
158
+ begin
160
159
  record.send(operator + '=', value)
161
- else
162
- logger.error("Cannot assign value [#{value.inspect}]")
163
- logger.error("Value was Type (#{value.class}) - Required Type for has_one #{operator} is [#{klass}]")
160
+ rescue StandardError => x
161
+ logger.error("Cannot assign value [#{value.inspect}] for has_one [#{operator}]")
162
+ logger.error(x.inspect)
163
+
164
+ if value.is_a?(model_method.klass)
165
+ logger.error("Value was Correct Type (#{value.class}) - [#{model_method.klass}]")
166
+ else
167
+ logger.error("Value was Type (#{value.class}) - Required Type is [#{model_method.klass}]")
168
+ end
164
169
  end
165
170
 
166
171
  elsif model_method.operator_for(:assignment)
@@ -182,15 +187,17 @@ module DataShift
182
187
  begin
183
188
  params_num = record.method(operator.to_sym).arity
184
189
 
185
- # think this should be == 0 but seen situations where -1 returned even though method accepts ZERO params
186
- if(params_num < 1)
190
+ # There are situations where -1 returned, this is normally related to variable number of arguments,
191
+ # but maybe buggy - have seen -1 for what seems a perfectly normal method e.g def attach_audio_file_helper(file_name)
192
+ #
193
+ if(params_num == 0)
187
194
  logger.debug("Calling Custom Method (no value) [#{operator}]")
188
195
  record.send(operator)
189
196
  elsif(value)
190
197
  logger.debug("Custom Method assignment of value #{value} => [#{operator}]")
191
198
  record.send(operator, value)
192
199
  end
193
- rescue => e
200
+ rescue StandardError => e
194
201
  logger.error e.backtrace.first
195
202
  raise DataProcessingError, "Method [#{operator}] could not process #{value} - #{e.inspect}"
196
203
  end
@@ -223,7 +230,7 @@ module DataShift
223
230
  record.send( operator + '=', value)
224
231
  end
225
232
  end
226
- rescue => e
233
+ rescue StandardError => e
227
234
  logger.error e.backtrace.first
228
235
  logger.error("Assignment failed #{e.inspect}")
229
236
  raise DataProcessingError, "Failed to set [#{value}] via [#{operator}] due to ERROR : #{e.message}"
@@ -251,7 +258,6 @@ module DataShift
251
258
  # TODO: - add find by operators from headers or configuration to insistent_find_by_list
252
259
  Populator.insistent_find_by_list.each do |find_by|
253
260
  begin
254
-
255
261
  item = klass.where(find_by => value).first_or_create
256
262
 
257
263
  next unless item
@@ -259,8 +265,7 @@ module DataShift
259
265
  logger.info("Populator assigning #{item.inspect} to belongs_to association #{operator}")
260
266
  record.send(operator + '=', item)
261
267
  break
262
-
263
- rescue => e
268
+ rescue StandardError => e
264
269
  logger.error(e.inspect)
265
270
  logger.error("Failed attempting to find belongs_to for #{method_binding.pp}")
266
271
  if find_by == Populator.insistent_method_list.last
@@ -25,7 +25,7 @@ module DataShift
25
25
  #
26
26
  def self.configure(load_object_class, yaml_file)
27
27
 
28
- @config = YAML.load( ERB.new( IO.read(yaml_file) ).result )
28
+ @config = Configuration.parse_yaml(yaml_file)
29
29
 
30
30
  if @config[:datashift_populators]
31
31
  @config[:datashift_populators].each do |_operator, type|
@@ -66,9 +66,7 @@ module DataShift
66
66
  def self.get_populator(method_binding)
67
67
 
68
68
  unless method_binding.nil? || method_binding.invalid?
69
- if(populators.key?(method_binding.operator))
70
- return populators[method_binding.operator].new
71
- end
69
+ return populators[method_binding.operator].new if populators.key?(method_binding.operator)
72
70
  end
73
71
 
74
72
  global_populator_class.new
@@ -40,7 +40,7 @@ module DataShift
40
40
 
41
41
  heading_lookups = method_binding.inbound_column.lookup_list
42
42
 
43
- if (part1.nil? || part1.empty?) && (part2.nil? || part2.empty?)
43
+ if part1.blank? && part2.blank?
44
44
 
45
45
  # Column completely empty - check for lookup supplied via the
46
46
  # inbound column headers/config
@@ -50,7 +50,7 @@ module DataShift
50
50
  part1 = heading_lookups.find_by_operator
51
51
  part2 = heading_lookups.collect(&:value)
52
52
 
53
- elsif part2.nil? || part2.empty?
53
+ elsif part2.blank?
54
54
 
55
55
  # Only **value(s)** in column, so use field from header/config field
56
56
 
@@ -82,7 +82,7 @@ module DataShift
82
82
  return klazz.where("#{field} like ?", "#{search_term}%").first if options[:use_like]
83
83
 
84
84
  return klazz.where("lower(#{field}) = ?", search_term.downcase).first
85
- rescue => e
85
+ rescue StandardError => e
86
86
  logger.error("Querying - Failed to find a record for [#{search_term}] on #{klazz}.#{field}")
87
87
  logger.error e.inspect
88
88
  logger.error e.backtrace.last
@@ -108,7 +108,6 @@ module DataShift
108
108
  logger.info("Scanning for record where #{klazz}.#{field} ~= #{find_search_term}")
109
109
 
110
110
  begin
111
-
112
111
  record = search_for_record(klazz, field, find_search_term)
113
112
 
114
113
  unless record
@@ -134,7 +133,7 @@ module DataShift
134
133
  end
135
134
 
136
135
  return record
137
- rescue => e
136
+ rescue StandardError => e
138
137
  logger.error("Exception attempting to find a record for [#{search_term}] on #{klazz}.#{field}")
139
138
  logger.error e.backtrace
140
139
  logger.error e.inspect
@@ -43,7 +43,7 @@ module DataShift
43
43
 
44
44
  module Transformation
45
45
 
46
- extend self
46
+ module_function
47
47
 
48
48
  # Yields a singleton instance of Transformations::Factory
49
49
  # so you can specify additional transforms in .rb config
@@ -62,7 +62,7 @@ module DataShift
62
62
 
63
63
  class Factory
64
64
 
65
- TRANSFORMERS_HASH_INSTANCE_NAMES = [:default, :override, :substitution, :prefix, :postfix].freeze
65
+ TRANSFORMERS_HASH_INSTANCE_NAMES = %i[default override substitution prefix postfix].freeze
66
66
 
67
67
  include DataShift::Logging
68
68
 
@@ -99,7 +99,7 @@ module DataShift
99
99
  #
100
100
  def configure_from(load_object_class, yaml_file, locale_key = 'data_flow_schema')
101
101
 
102
- data = YAML.load( ERB.new( IO.read(yaml_file) ).result )
102
+ data = Configuration.parse_yaml(yaml_file)
103
103
 
104
104
  class_name = load_object_class.name
105
105
 
@@ -1,3 +1,3 @@
1
1
  module DataShift
2
- VERSION = '0.40.3'.freeze unless defined?(VERSION)
2
+ VERSION = '0.40.4'.freeze unless defined?(VERSION)
3
3
  end
@@ -3,13 +3,13 @@ require 'rails/generators/base'
3
3
  module Datashift
4
4
  module Generators
5
5
  class InstallGenerator < Rails::Generators::Base
6
- source_root File.expand_path("../../templates", __FILE__)
6
+ source_root File.expand_path('../templates', __dir__)
7
7
 
8
- desc "Creates a DataShift initializer within your Rails application."
8
+ desc 'Creates a DataShift initializer within your Rails application.'
9
9
  class_option :orm
10
10
 
11
11
  def copy_initializer
12
- template "datashift.rb", "config/initializers/datashift.rb"
12
+ template 'datashift.rb', 'config/initializers/datashift.rb'
13
13
  end
14
14
 
15
15
  def rails_4?
@@ -8,7 +8,6 @@ DataShift::Configuration.call do |config|
8
8
  #
9
9
  # config.with = [:assignment, :enum]
10
10
 
11
-
12
11
  # Configure what association types to ignore during export with associations.
13
12
  #
14
13
  # The default is to include ALL all association TYPES as defined by
@@ -18,17 +17,14 @@ DataShift::Configuration.call do |config|
18
17
  #
19
18
  # config.exclude = [:belongs_to]
20
19
 
21
-
22
20
  # Configure the Global list of of columns to remove/ignore from files
23
21
  #
24
22
  # config.remove_columns = [:id, :dont_want_this, :no]
25
23
 
26
-
27
24
  # List of headers/columns that are Mandatory i.e must be present in the inbound data
28
25
  #
29
26
  # config.mandatory = [:yes]
30
27
 
31
-
32
28
  # Remove standard Rails cols like :id, created_at, updated_at
33
29
  # Default is false
34
30
  #
@@ -14,30 +14,28 @@ module Datashift
14
14
 
15
15
  class Generate < Thor
16
16
 
17
- DEFAULT_IMPORT_TEMPLTE ||= "import_mapping_template.yaml".freeze
17
+ DEFAULT_IMPORT_TEMPLTE ||= 'import_mapping_template.yaml'.freeze
18
18
 
19
19
  include DataShift::ThorBehavior
20
20
 
21
- desc "import", "Generate an Import configuration template (YAML)"
21
+ desc 'import', 'Generate an Import configuration template (YAML)'
22
22
 
23
- method_option :model, aliases: '-m', required: true, desc: "The active record model to use for mappings"
23
+ method_option :model, aliases: '-m', required: true, desc: 'The active record model to use for mappings'
24
24
 
25
25
  method_option :result, aliases: '-r', required: true,
26
- desc: "Path or file to create resulting YAML config\n\nIf a PATH supplied, filename will be [#{DEFAULT_IMPORT_TEMPLTE}]"
26
+ desc: "Path or file to create resulting YAML config\n\nIf a PATH supplied, filename will be [#{DEFAULT_IMPORT_TEMPLTE}]"
27
27
 
28
28
  # :remove_columns - List of columns to remove from files
29
29
  #
30
30
  # :remove_rails - Remove standard Rails cols like :id, created_at etc
31
31
 
32
- def import()
32
+ def import
33
33
 
34
34
  start_connections
35
35
 
36
36
  result = options[:result]
37
37
 
38
- if(File.directory?(result))
39
- result = File.join(result, DEFAULT_IMPORT_TEMPLTE)
40
- end
38
+ result = File.join(result, DEFAULT_IMPORT_TEMPLTE) if File.directory?(result)
41
39
 
42
40
  logger.info "Datashift: Starting Import mapping template generation to [#{result}]"
43
41
 
@@ -46,7 +44,7 @@ module Datashift
46
44
  puts "Creating new configuration file : [#{result}]"
47
45
  begin
48
46
  mapper.write_import(result, options[:model], options)
49
- rescue => x
47
+ rescue StandardError => x
50
48
  puts "ERROR - Failed to create config file #{result}"
51
49
  puts x.message
52
50
  end
@@ -17,37 +17,37 @@ module Datashift
17
17
  include DataShift::ThorBehavior
18
18
 
19
19
  class_option :associations, aliases: '-a',
20
- type: :boolean,
21
- desc: 'Include associations. Can be further refined by :with & :exclude'
20
+ type: :boolean,
21
+ desc: 'Include associations. Can be further refined by :with & :exclude'
22
22
 
23
23
  class_option :expand_associations, type: :boolean,
24
- desc: 'Expand association data to multiple columns i.e 1 column per attribute'
24
+ desc: 'Expand association data to multiple columns i.e 1 column per attribute'
25
25
 
26
26
  class_option :methods, type: :array,
27
- desc: 'List of additional methods to call on model, useful for situations like delegated methods'
27
+ desc: 'List of additional methods to call on model, useful for situations like delegated methods'
28
28
 
29
29
  class_option :with, type: :array,
30
- desc: "Restrict association types. Choose from #{DataShift::ModelMethod.supported_types_enum.inspect}"
30
+ desc: "Restrict association types. Choose from #{DataShift::ModelMethod.supported_types_enum.inspect}"
31
31
 
32
32
  class_option :exclude, type: :array,
33
- desc: "Exclude association types. Choose from #{DataShift::ModelMethod.supported_types_enum.inspect}"
33
+ desc: "Exclude association types. Choose from #{DataShift::ModelMethod.supported_types_enum.inspect}"
34
34
 
35
35
  class_option :remove, type: :array,
36
- desc: "Don't include this list of supplied fields"
36
+ desc: "Don't include this list of supplied fields"
37
37
 
38
38
  class_option :remove_rails, type: :boolean,
39
- desc: "Remove standard Rails cols : #{DataShift::Configuration.rails_columns.inspect}"
39
+ desc: "Remove standard Rails cols : #{DataShift::Configuration.rails_columns.inspect}"
40
40
 
41
41
  class_option :json, type: :boolean,
42
- desc: 'Export association data as json rather than hash'
42
+ desc: 'Export association data as json rather than hash'
43
43
 
44
- desc "excel", "export any active record model (with optional associations)"
44
+ desc 'excel', 'export any active record model (with optional associations)'
45
45
 
46
- method_option :model, :aliases => '-m', :required => true, desc: "The active record model to export"
47
- method_option :result, :aliases => '-r', :required => true, desc: "Create template of model in supplied file"
48
- method_option :sheet_name, :type => :string, desc: "Name to use for Excel worksheet instead of model name"
46
+ method_option :model, aliases: '-m', required: true, desc: 'The active record model to export'
47
+ method_option :result, aliases: '-r', required: true, desc: 'Create template of model in supplied file'
48
+ method_option :sheet_name, type: :string, desc: 'Name to use for Excel worksheet instead of model name'
49
49
 
50
- def excel()
50
+ def excel
51
51
  start_connections
52
52
 
53
53
  export(DataShift::ExcelExporter.new)
@@ -55,13 +55,12 @@ module Datashift
55
55
  puts "Datashift: Excel export COMPLETED to #{options[:result]}"
56
56
  end
57
57
 
58
+ desc 'csv', 'export any active record model (with optional associations)'
58
59
 
59
- desc "csv", "export any active record model (with optional associations)"
60
+ method_option :model, aliases: '-m', required: true, desc: 'The active record model to export'
61
+ method_option :result, aliases: '-r', required: true, desc: 'Create template of model in supplied file'
60
62
 
61
- method_option :model, :aliases => '-m', :required => true, desc: "The active record model to export"
62
- method_option :result, :aliases => '-r', :required => true, desc: "Create template of model in supplied file"
63
-
64
- def csv()
63
+ def csv
65
64
  start_connections
66
65
 
67
66
  export(DataShift::CsvExporter.new)
@@ -69,28 +68,27 @@ module Datashift
69
68
  puts "Datashift: CSV export COMPLETED to #{options[:result]}"
70
69
  end
71
70
 
71
+ desc 'db', 'Export every Active Record model'
72
72
 
73
- desc "db", "Export every Active Record model"
74
-
75
- method_option :path, :aliases => '-p', :required => true, desc: "Path in which to create export files"
76
- method_option :csv, :aliases => '-c', desc: "Export to CSV instead - Excel is default."
73
+ method_option :path, aliases: '-p', required: true, desc: 'Path in which to create export files'
74
+ method_option :csv, aliases: '-c', desc: 'Export to CSV instead - Excel is default.'
77
75
 
78
- method_option :prefix_map, :aliases => '-x', type: :hash, :default => {},
79
- desc: "For namespaced tables/models map table prefix to module name e.g spree_: Spree"
76
+ method_option :prefix_map, aliases: '-x', type: :hash, default: {},
77
+ desc: 'For namespaced tables/models map table prefix to module name e.g spree_: Spree'
80
78
 
81
- method_option :modules, :aliases => '-m', type: :array, :default => [],
82
- desc: "List of Modules to search for namespaced models"
79
+ method_option :modules, aliases: '-m', type: :array, default: [],
80
+ desc: 'List of Modules to search for namespaced models'
83
81
 
84
- def db()
82
+ def db
85
83
 
86
84
  start_connections
87
85
 
88
86
  unless File.directory?(options[:path])
89
87
  puts "WARNING : No such PATH found #{options[:path]} - trying mkdir"
90
- FileUtils::mkdir_p(options[:path])
88
+ FileUtils.mkdir_p(options[:path])
91
89
  end
92
90
 
93
- exporter = options[:csv] ? DataShift::CsvExporter.new : DataShift::ExcelExporter.new
91
+ exporter = options[:csv] ? DataShift::CsvExporter.new : DataShift::ExcelExporter.new
94
92
 
95
93
  DataShift::Exporters::Configuration.from_hash(options)
96
94
 
@@ -105,13 +103,15 @@ module Datashift
105
103
  break if(@klass)
106
104
  end
107
105
 
108
- options[:prefix_map].each do |p, m|
109
- @klass = DataShift::MapperUtils.table_to_arclass(table.gsub(p, ''), m)
110
- break if(@klass)
111
- end unless(@klass)
106
+ unless @klass
107
+ options[:prefix_map].each do |p, m|
108
+ @klass = DataShift::MapperUtils.table_to_arclass(table.gsub(p, ''), m)
109
+ break if(@klass)
110
+ end
111
+ end
112
112
 
113
113
  if(@klass.nil?)
114
- puts "ERROR: No Model found for Table [#{table}] - perhaps check modules/prefixes"
114
+ puts "ERROR: No Model found for Table [#{table}] - perhaps check modules/prefixes"
115
115
  next
116
116
  end
117
117
 
@@ -121,15 +121,15 @@ module Datashift
121
121
 
122
122
  begin
123
123
  if(options[:associations])
124
- logger.info("Datashift: Exporting with associations")
124
+ logger.info('Datashift: Exporting with associations')
125
125
  exporter.export_with_associations(result, @klass, @klass.all)
126
126
  else
127
- exporter.export(result, @klass.all, :sheet_name => @klass.name)
127
+ exporter.export(result, @klass.all, sheet_name: @klass.name)
128
128
  end
129
- rescue => e
129
+ rescue StandardError => e
130
130
  puts e
131
131
  puts e.backtrace
132
- puts "Warning: Error during export, data may be incomplete"
132
+ puts 'Warning: Error during export, data may be incomplete'
133
133
  end
134
134
  end
135
135
  end
@@ -144,27 +144,26 @@ module Datashift
144
144
 
145
145
  logger.info "Datashift: Starting export with #{exporter.class.name} to #{result}"
146
146
 
147
- klass = DataShift::MapperUtils::class_from_string(model) #Kernel.const_get(model)
147
+ klass = DataShift::MapperUtils.class_from_string(model) # Kernel.const_get(model)
148
148
 
149
149
  raise "ERROR: No such Model [#{model}] found - check valid model supplied via -model <Class>" if(klass.nil?)
150
150
 
151
151
  begin
152
-
153
152
  if(options[:associations])
154
- logger.info("Datashift: Exporting with associations")
153
+ logger.info('Datashift: Exporting with associations')
155
154
  exporter.export_with_associations(result, klass, klass.all)
156
155
  else
157
156
  exporter.export(result, klass.all, options)
158
157
  end
159
- rescue => e
158
+ rescue StandardError => e
160
159
  puts e
161
160
  puts e.backtrace
162
- puts "Warning: Error during export, data may be incomplete"
161
+ puts 'Warning: Error during export, data may be incomplete'
163
162
  end
164
163
 
165
164
  end
166
165
 
167
- end # no_commands
166
+ end # no_commands
168
167
 
169
168
  end
170
169