datashift 0.40.3 → 0.40.4

Files changed (46)
  1. checksums.yaml +5 -5
  2. data/README.md +7 -2
  3. data/datashift.thor +28 -23
  4. data/lib/datashift.rb +6 -6
  5. data/lib/datashift/binder.rb +30 -11
  6. data/lib/datashift/configuration.rb +10 -2
  7. data/lib/datashift/core_ext/array.rb +7 -7
  8. data/lib/datashift/delimiters.rb +1 -1
  9. data/lib/datashift/doc_context.rb +1 -1
  10. data/lib/datashift/excel_base.rb +2 -2
  11. data/lib/datashift/exporters/csv_exporter.rb +0 -1
  12. data/lib/datashift/exporters/excel_exporter.rb +3 -4
  13. data/lib/datashift/file_definitions.rb +1 -3
  14. data/lib/datashift/inbound_data/method_binding.rb +5 -5
  15. data/lib/datashift/loaders/csv_loader.rb +2 -3
  16. data/lib/datashift/loaders/excel_loader.rb +8 -4
  17. data/lib/datashift/loaders/failure_data.rb +1 -3
  18. data/lib/datashift/loaders/loader_base.rb +2 -8
  19. data/lib/datashift/loaders/loader_factory.rb +6 -0
  20. data/lib/datashift/loaders/paperclip/attachment_loader.rb +1 -1
  21. data/lib/datashift/loaders/paperclip/datashift_paperclip.rb +2 -2
  22. data/lib/datashift/loaders/paperclip/image_loading.rb +2 -2
  23. data/lib/datashift/mapping/data_flow_schema.rb +40 -14
  24. data/lib/datashift/mapping/mapper_utils.rb +3 -3
  25. data/lib/datashift/model_methods/catalogue.rb +14 -14
  26. data/lib/datashift/model_methods/model_method.rb +5 -6
  27. data/lib/datashift/model_methods/operator.rb +1 -1
  28. data/lib/datashift/node_context.rb +2 -3
  29. data/lib/datashift/populators/has_many.rb +2 -2
  30. data/lib/datashift/populators/insistent_assignment.rb +4 -4
  31. data/lib/datashift/populators/populator.rb +21 -16
  32. data/lib/datashift/populators/populator_factory.rb +2 -4
  33. data/lib/datashift/querying.rb +4 -5
  34. data/lib/datashift/transformation/factory.rb +3 -3
  35. data/lib/datashift/version.rb +1 -1
  36. data/lib/generators/datashift/install_generator.rb +3 -3
  37. data/lib/generators/templates/datashift.rb +0 -4
  38. data/lib/tasks/config.thor +7 -9
  39. data/lib/tasks/export.thor +44 -45
  40. data/lib/tasks/generate.thor +43 -37
  41. data/lib/tasks/import.thor +20 -14
  42. data/lib/tasks/paperclip.thor +46 -48
  43. data/lib/tasks/thor_behaviour.rb +1 -1
  44. data/lib/tasks/to_convert_to_thor/db_tasks.rake +1 -3
  45. data/lib/tasks/tools.thor +37 -38
  46. metadata +45 -45
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA256:
- metadata.gz: 53ca961606b77228304650d8087f155975c54691f7b7dd8b9c7aa17595c3088f
- data.tar.gz: 01bb10cbbfac38feb2ec994a5db033aba22cc423e1051c42c0fa1e1c403910aa
+ SHA1:
+ metadata.gz: d7be9f9beb4237a952c953f59c0e887124a3222a
+ data.tar.gz: e68197e5dd89fa5a21165b9bd8e606e58650dfc7
  SHA512:
- metadata.gz: aa5e57594b2b5f8e893b2153300566a06f298d0174059a963d1de3ba1b797e960066a0b5c6949397b4b335f58978b3f5cfea460bab39bcaa776fc1b6a32a6b3d
- data.tar.gz: 3677cc69f278063588621cdd7849a4633cc755cb03deb9d52f748be2ade59575eb4160fc1177e612ac331a23e2edd57f5bac7f37b50b80bb0a9954ff99da2051
+ metadata.gz: d888089a643a05aedb27deabb01c814dc27940d1ef96475ebb320f6b71799629ae89797df740217569dc9a86f5abe8010ca4e6f5be5fb077b97594f5d49fed37
+ data.tar.gz: dfc8a6dca16c663df3b0ae8959de6382f0ff10feeadaeaa019e04083d37ccc3512fcedc4deee610f3115ceb43c6d6bef152a80e33f7afac5e76cd38d2b8f4a95
data/README.md CHANGED
@@ -249,9 +249,14 @@ is to run our rail's install generator :
  ```ruby
  rails g datashift:install
  ```
+
+ You can create a model specific file at anytime via
+ ```bash
+ thor datashift:config:generate:import
+ ```

- To create such a file manually, create an initialisation file within `config/initializers`,
- and see `lib/datashift/configuration.rb` for details of options, for example
+ To create a Rails tyle config block manually, create an initialisation file within `config/initializers`,
+ and see [lib/datashift/configuration.rb](lib/datashift/configuration.rb`) for details of all possible options, for example

  ```ruby
  DataShift::Configuration.call do |c|
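For reference, a minimal sketch of such an initializer, assuming the option names that appear in lib/datashift/configuration.rb further down this diff (with, exclude, remove_columns, image_path_prefix); the values shown are illustrative defaults, not prescribed settings:

```ruby
# config/initializers/datashift.rb - illustrative sketch only
DataShift::Configuration.call do |c|
  c.with = %i[assignment enum]   # association types to include (see configuration.rb)
  c.exclude = []                 # operators to skip
  c.remove_columns = []          # columns to drop from import/export
  c.image_path_prefix = nil      # assumed default; prefix for attachment paths
end
```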
data/datashift.thor CHANGED
@@ -10,7 +10,7 @@
  # type applications, files and databases.
  #
  # Provides support for moving data between .xls (Excel/OpenOffice)
- # Spreedsheets via Ruby and AR, enabling direct import/export of 
+ # Spreedsheets via Ruby and AR, enabling direct import/export of
  # ActiveRecord models with all their associations from database.
  #
  # Provides support for moving data between csv files and AR, enabling direct
@@ -18,14 +18,14 @@
  #
  require 'thor'

- $:.push File.expand_path("lib", __FILE__)
+ $LOAD_PATH.push File.expand_path('lib')

  require 'datashift'
  require 'factory_bot_rails'
  require 'database_cleaner'

  require_relative File.join('spec', 'support/sandbox')
- require_relative File.join('spec' ,'support/datashift_test_helpers')
+ require_relative File.join('spec', 'support/datashift_test_helpers')

  module Datashift

@@ -33,7 +33,7 @@ module Datashift

  include DataShift::TestHelpers

- desc "lint", "Run in spec - Verify that FactoryBot factories are valid"
+ desc 'lint', 'Run in spec - Verify that FactoryBot factories are valid'

  def lint

@@ -44,7 +44,7 @@ module Datashift
  begin
  DatabaseCleaner.start

- puts "Running FactoryBot.lint"
+ puts 'Running FactoryBot.lint'
  FactoryBot.lint
  ensure
  DatabaseCleaner.clean
@@ -52,36 +52,41 @@ module Datashift

  end

-
- desc "sandbox", 'Rebuild the dummy rails app in spec - required for testing'
+ desc 'sandbox', 'Rebuild the dummy rails app in spec - required for testing'

  def sandbox
  # Need an active record DB to test against, so we manage own Rails sandbox
  DataShift::Sandbox.gen_rails_sandbox( :force )
  end

- desc "build", 'Build gem and install in one step'
+ desc 'build', 'Build gem and install in one step'

- method_option :bump, :aliases => '-b', type: :string, desc: "Bump the version", required: false
+ method_option :bump, aliases: '-b', type: :string, desc: 'Bump the version', required: false

- method_option :push, :aliases => '-p', :desc => "Push resulting gem to rubygems.org"
+ method_option :push, aliases: '-p', desc: 'Push resulting gem to rubygems.org'

- method_option :install, :aliases => '-i',
- :desc => "Install freshly built gem locally", type: :boolean, default: false
+ method_option :install, aliases: '-i',
+ desc: 'Install freshly built gem locally', type: :boolean, default: false

  def build

- raise "Please bump to a new version to install at rubygems" if options[:install] && options[:bump].blank?
- version = options[:bump]
+ if options[:push] && (options[:bump].blank? || options[:bump] !~ /^(\d+\.)?(\d+\.)?(\*|\d+)$/)
+ puts 'ERROR: Please bump to a new numeric version to push to rubygems'
+ exit(-1)
+ end
+
+ version = options[:bump] || DataShift::VERSION

  # Bump the VERSION file in library
- File.open( File.join('lib/datashift/version.rb'), 'w') do |f|
- f << "module DataShift\n"
- f << " VERSION = '#{version}'.freeze unless defined?(VERSION)\n"
- f << "end\n"
- end if(options[:bump].present?)
+ if options[:bump].present?
+ File.open( File.join('lib/datashift/version.rb'), 'w') do |f|
+ f << "module DataShift\n"
+ f << " VERSION = '#{version}'.freeze unless defined?(VERSION)\n"
+ f << "end\n"
+ end
+ end

- build_cmd = "gem build datashift.gemspec"
+ build_cmd = 'gem build datashift.gemspec'

  puts "\n*** Running build cmd [#{build_cmd}]"

@@ -111,13 +116,13 @@ module Datashift
  if File.exist?(env)
  begin
  require env
- rescue => e
+ rescue StandardError => e
  logger.error("Failed to initialise ActiveRecord : #{e.message}")
- raise ConnectionError.new("Failed to initialise ActiveRecord : #{e.message}")
+ raise ConnectionError, "Failed to initialise ActiveRecord : #{e.message}"
  end

  else
- raise DataShift::PathError.new('No config/environment.rb found - cannot initialise ActiveRecord')
+ raise DataShift::PathError, 'No config/environment.rb found - cannot initialise ActiveRecord'
  end
  end
  end
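The reworked build task now refuses to push to rubygems unless a plain numeric version is supplied via the bump option. A standalone check of the same regex against some illustrative version strings:

```ruby
# Candidate versions run against the regex used by the build task above.
%w[0.40.4 1.0 2 0.40.4.rc1 v1.2.3].each do |candidate|
  verdict = candidate =~ /^(\d+\.)?(\d+\.)?(\*|\d+)$/ ? 'accepted' : 'rejected'
  puts "#{candidate} => #{verdict}"
end
# => 0.40.4, 1.0 and 2 are accepted; 0.40.4.rc1 and v1.2.3 are rejected
```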
data/lib/datashift.rb CHANGED
@@ -47,7 +47,7 @@ module DataShift

  def self.require_libraries

- loader_libs = %w(lib)
+ loader_libs = %w[lib]

  # Base search paths - these will be searched recursively
  loader_paths = []
@@ -70,11 +70,11 @@ module DataShift
  require_relative 'datashift/loaders/reporters/reporter'
  require_relative 'datashift/loaders/loader_base'
  require_relative 'datashift/exporters/exporter_base'
- rescue => x
+ rescue StandardError => x
  puts "Problem initializing gem #{x.inspect}"
  end

- require_libs = %w(
+ require_libs = %w[
  datashift/core_ext
  datashift
  datashift/mapping
@@ -88,14 +88,14 @@ module DataShift
  datashift/helpers
  datashift/applications
  datashift/populators
- )
+ ]

  require_libs.each do |base|
  Dir[File.join(library_path, base, '*.rb')].each do |rb|
  # puts rb
  begin
  require_relative rb unless File.directory?(rb)
- rescue => x
+ rescue StandardError => x
  puts "WARNING - Problem loading datashift file #{rb} - #{x.inspect}"
  puts x.backtrace
  end
@@ -128,7 +128,7 @@ module DataShift

  Dir["#{base}/*.thor"].each do |f|
  next unless File.file?(f)
- load(f)
+ Thor::Util.load_thorfile(f)
  end
  end

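The gem now loads its bundled .thor task files through Thor's own helper instead of Kernel#load. A minimal sketch of the same pattern, with an assumed lib/tasks directory:

```ruby
require 'thor'

# Load every *.thor file beneath an (assumed) lib/tasks directory,
# mirroring the change from load(f) to Thor::Util.load_thorfile(f).
Dir[File.join('lib', 'tasks', '*.thor')].each do |f|
  next unless File.file?(f)
  Thor::Util.load_thorfile(f)
end
```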
data/lib/datashift/binder.rb CHANGED
@@ -101,14 +101,16 @@ module DataShift

  # If klass not in Dictionary yet, add to dictionary all possible operators on klass
  # which can be used to map headers and populate an object of type klass
- model_method_mgr = ModelMethods::Manager.catalog_class(klass)
+ model_methods_collection = ModelMethods::Manager.catalog_class(klass)
+
+ bound = bindings.map(&:source)

  [*columns].each_with_index do |col_data, col_index|
  raw_col_data = col_data.to_s.strip

  if raw_col_data.nil? || raw_col_data.empty?
  logger.warn("Column list contains empty or null header at index #{col_index}")
- bindings << NoMethodBinding.new(raw_col_data, col_index)
+ bindings << NoMethodBinding.new(raw_col_data, idx: col_index)
  next
  end

@@ -119,25 +121,38 @@
  #
  raw_col_name, where_field, where_value, *data = raw_col_data.split(column_delim).map(&:strip)

+ # Config loaded details trump internal mappings. User may not bother setting index of the column
+ # in config, so attempt now to match it to actual header
+ if bound.include?(raw_col_name)
+ external = bindings.find { |b| b.source == raw_col_name }
+ external.index = col_index if(external && external.index.nil?)
+ next
+ end
+
  # Find the domain model method details
- model_method = model_method_mgr.search(raw_col_name)
+ model_method = model_methods_collection.search(raw_col_name)

- # No such column, but if config set to include it, for example for delegated methods, add as op type :assignment
+ # No such column, so check config
+ #
+ # Forced inclusion for example for delegated methods that do not show up in reflection.
+ #
+ # Add as operator type :assignment
+ #
  if( model_method.nil? && (include_all? || forced?(raw_col_name)) )
  logger.debug("Operator #{raw_col_name} not found but forced inclusion set - adding as :assignment")
- model_method = model_method_mgr.insert(raw_col_name, :assignment)
+ model_method = model_methods_collection.insert(raw_col_name, :assignment)
  end

  unless model_method
  Binder.substitutions(raw_col_name).each do |n|
- model_method = model_method_mgr.search(n)
+ model_method = model_methods_collection.search(n)
  break if model_method
  end
  end

  if(model_method)

- binding = MethodBinding.new(raw_col_name, col_index, model_method)
+ binding = MethodBinding.new(raw_col_name, model_method, idx: col_index)

  # we slurped up all possible data in split, turn it back into original string
  binding.add_column_data(data.join(column_delim))
@@ -147,7 +162,7 @@

  begin
  binding.add_lookup(model_method, where_field, where_value)
- rescue => e
+ rescue StandardError => e
  logger.error(e.message)
  add_missing(raw_col_data, col_index, "Field [#{where_field}] Not Found for [#{raw_col_name}] (#{model_method.operator})")
  next
@@ -167,8 +182,7 @@
  end

  def add_bindings_from_nodes( nodes )
- logger.debug("Adding [#{nodes.size}] custom bindings")
- nodes.each { |n| bindings << n.method_binding }
+ nodes.each { |n| bindings << n.method_binding unless n.is_a?(NoMethodBinding) }
  end

  # Essentially we map any string collection of field names, not just headers from files
@@ -177,7 +191,7 @@
  def add_missing(col_data, col_index, reason)
  logger.warn(reason)

- missing = NoMethodBinding.new(col_data, col_index, reason: reason)
+ missing = NoMethodBinding.new(col_data, reason: reason, idx: col_index)

  missing_bindings << missing
  bindings << missing
@@ -210,6 +224,11 @@
  bindings.collect( &:operator )
  end

+ # Find a binding, matches raw client supplied names e.g header and has a valid index
+ def find_for_source( name )
+ bindings.find { |b| b.source == name && b.index }
+ end
+
  end

  end
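The new `bound` lookup in the Binder lets bindings that arrived from a config file, which may not specify a column index, claim their index when the matching header is encountered. A simplified, self-contained sketch of that matching step (the Struct stands in for the real binding classes):

```ruby
Binding = Struct.new(:source, :index)

# Bindings loaded from config: 'sku' has no index yet, 'name' already does.
bindings = [Binding.new('sku', nil), Binding.new('name', 0)]
headers  = %w[name sku price]

bound = bindings.map(&:source)

headers.each_with_index do |header, col_index|
  next unless bound.include?(header)

  external = bindings.find { |b| b.source == header }
  external.index = col_index if external && external.index.nil?
end

p bindings # => 'sku' picks up index 1; 'name' keeps its original index 0
```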
data/lib/datashift/configuration.rb CHANGED
@@ -11,6 +11,14 @@ module DataShift

  class Configuration

+
+ def self.parse_yaml( yaml_file )
+ bound_template = ERB.new( IO.read(yaml_file)).result
+
+ YAML.safe_load(bound_template, [Date, Time, Symbol] )
+ end
+
+
  # List of association +TYPES+ to INCLUDE [:assignment, :enum, :belongs_to, :has_one, :has_many, :method]
  # Defaults to [:assignment, :enum]
  #
@@ -122,11 +130,11 @@ module DataShift
  attr_accessor :image_path_prefix

  def self.rails_columns
- @rails_standard_columns ||= [:id, :created_at, :created_on, :updated_at, :updated_on]
+ @rails_standard_columns ||= %i[id created_at created_on updated_at updated_on]
  end

  def initialize
- @with = [:assignment, :enum]
+ @with = %i[assignment enum]
  @exclude = []
  @remove_columns = []

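The new Configuration.parse_yaml runs the file through ERB before a restricted YAML load that only whitelists Date, Time and Symbol beyond the basic types. A standalone sketch of the same call (the file name is illustrative, and the two-argument safe_load form matches the older Psych API used here):

```ruby
require 'erb'
require 'yaml'
require 'date'

yaml_file = 'config/datashift_import.yml' # illustrative path

# Evaluate any ERB tags first, then load with a restricted class whitelist.
bound_template = ERB.new(IO.read(yaml_file)).result
config = YAML.safe_load(bound_template, [Date, Time, Symbol])
```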
data/lib/datashift/core_ext/array.rb CHANGED
@@ -1,12 +1,12 @@
  Array.class_eval do

- ARRAY_FWDABLE_EXCLUDED_METHODS = [
- :class, :singleton_class, :clone, :dup, :initialize_dup, :initialize_clone,
- :freeze, :methods, :singleton_methods, :protected_methods, :private_methods, :public_methods,
- :instance_variables, :instance_variable_get, :instance_variable_set, :instance_variable_defined?,
- :instance_of?, :kind_of?, :is_a?, :tap, :send, :public_send, :respond_to?, :respond_to_missing?,
- :extend, :display, :method, :public_method, :define_singleton_method, :object_id, :equal?,
- :instance_eval, :instance_exec, :__send__, :__id__
+ ARRAY_FWDABLE_EXCLUDED_METHODS = %i[
+ class singleton_class clone dup initialize_dup initialize_clone
+ freeze methods singleton_methods protected_methods private_methods public_methods
+ instance_variables instance_variable_get instance_variable_set instance_variable_defined?
+ instance_of? kind_of? is_a? tap send public_send respond_to? respond_to_missing?
+ extend display method public_method define_singleton_method object_id equal?
+ instance_eval instance_exec __send__ __id__
  ].freeze

  def self.delegated_methods_for_fwdable
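The switch from bracketed symbol arrays to %i[] literals here and in configuration.rb is purely stylistic; both forms build the same array:

```ruby
%i[assignment enum] == [:assignment, :enum] # => true
```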
data/lib/datashift/delimiters.rb CHANGED
@@ -72,7 +72,7 @@ module DataShift
  # Objects can be created with multiple facets in single columns.
  # In this example a single Product can be configured with a consolidated mime and print types
  #
- # mime_type:jpeg,PDF ; print_type:colour equivalent to 
+ # mime_type:jpeg,PDF ; print_type:colour equivalent to
  #
  # => mime_type:jpeg;print_type:colour | mime_type:PDF; print_type:colour

data/lib/datashift/doc_context.rb CHANGED
@@ -119,7 +119,7 @@
  logger.debug("SAVING #{load_object.class} : #{load_object.inspect}")
  begin
  load_object.save!
- rescue => e
+ rescue StandardError => e
  logger.error( "Save Error : #{e.inspect} on #{load_object.class}")
  logger.error(e.backtrace)
  false
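The repeated `rescue => e` to `rescue StandardError => e` changes across this release are behaviour-neutral: a bare rescue already catches only StandardError, the explicit form simply states it (the RuboCop Style/RescueStandardError convention). For example:

```ruby
begin
  raise ArgumentError, 'boom' # ArgumentError < StandardError
rescue StandardError => e
  puts "caught #{e.class}: #{e.message}"
end
```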
data/lib/datashift/excel_base.rb CHANGED
@@ -105,7 +105,7 @@

  # Pass a set of AR records
  def ar_to_xls(records, start_row: 1, headers: nil, data_flow_schema: nil)
- return if (!exportable?(records.first) || records.empty?)
+ return if !exportable?(records.first) || records.empty?

  # assume header row present
  row_index = start_row
@@ -140,7 +140,7 @@
  def ar_to_xls_cell(row_idx, col_idx, record, ar_method)
  datum = record.send(ar_method)
  self[row_idx, col_idx] = datum
- rescue => e
+ rescue StandardError => e
  logger.error("Failed to export #{datum} from #{ar_method.inspect} to column #{col_idx}")
  logger.error(e.message)
  logger.error(e.backtrace)
data/lib/datashift/exporters/csv_exporter.rb CHANGED
@@ -100,7 +100,6 @@ module DataShift
  csv.add_row(row)
  end
  end
-
  ensure
  DataShift::Configuration.call.with = state
  end # end write file
data/lib/datashift/exporters/excel_exporter.rb CHANGED
@@ -43,7 +43,7 @@ module DataShift
  def export(file, export_records, options = {})
  records = [*export_records]

- if(records.nil? || records.empty?)
+ if records.blank?
  logger.warn('Excel Export - No objects supplied for export - no file written')
  return
  end
@@ -108,7 +108,7 @@

  logger.info("Processing [#{records.size}] #{klass} records to Excel")

- # TODO - prepare_data_flow_schema here in middle of export, plus reaching through nodes to klass, does not smell right
+ # TODO: - prepare_data_flow_schema here in middle of export, plus reaching through nodes to klass, does not smell right
  prepare_data_flow_schema(klass) unless @data_flow_schema && @data_flow_schema.nodes.klass == klass

  export_headers(klass)
@@ -136,7 +136,7 @@
  else
  excel[row, column] = obj.send( model_method.operator )
  end
- rescue => x
+ rescue StandardError => x
  logger.error("Failed to write #{model_method.inspect} to Excel")
  logger.error(x.inspect)
  end
@@ -149,7 +149,6 @@

  logger.info("Writing Excel to file [#{file_name}]")
  excel.write( file_name )
-
  ensure
  DataShift::Configuration.call.with = state

data/lib/datashift/file_definitions.rb CHANGED
@@ -136,9 +136,7 @@ module FileDefinitions
  #
  def create_fixed_definition( field_range_map )

- unless field_range_map.is_a?(Hash)
- raise ArgumentError, 'Please supply hash to create_fixed_definition'
- end
+ raise ArgumentError, 'Please supply hash to create_fixed_definition' unless field_range_map.is_a?(Hash)

  keys = field_range_map.keys.collect(&:to_s)
  string_map = Hash[*keys.zip(field_range_map.values).flatten]
data/lib/datashift/inbound_data/method_binding.rb CHANGED
@@ -22,7 +22,7 @@ module DataShift
  attr_reader :inbound_column

  delegate :source, to: :inbound_column, allow_nil: true
- delegate :index, to: :inbound_column, allow_nil: true
+ delegate :index, 'index=', to: :inbound_column, allow_nil: true

  # Is this method detail a valid mapping, aids identifying unmapped/unmappable columns
  attr_accessor :valid
@@ -36,7 +36,7 @@ module DataShift
  #
  # col_types can typically be derived from klass.columns - set of ActiveRecord::ConnectionAdapters::Column

- def initialize(name, idx, model_method)
+ def initialize(name, model_method, idx: nil)
  @inbound_column = InboundData::Column.new(name, idx)

  @model_method = model_method
@@ -130,10 +130,10 @@ module DataShift

  attr_accessor :reason

- def initialize(client_name = '', client_idx = -1, options = {})
- super(client_name, client_idx, nil)
+ def initialize(client_name = '', reason: nil, idx: 1 )
+ super(client_name, nil, idx: idx)

- @reason = options[:reason] || ''
+ @reason = reason || ''
  end

  def invalid?
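MethodBinding now forwards the index writer as well as the reader to its InboundData::Column, which is what lets the Binder fill in a missing index later. A minimal sketch of delegating a writer with ActiveSupport (class names simplified, not the gem's real classes):

```ruby
require 'active_support/core_ext/module/delegation'

class Column
  attr_accessor :index
end

class Binding
  attr_reader :inbound_column

  # Forward both index and index= to the wrapped column, as in the diff above.
  delegate :index, 'index=', to: :inbound_column, allow_nil: true

  def initialize(column)
    @inbound_column = column
  end
end

b = Binding.new(Column.new)
b.index = 4
puts b.index # => 4
```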