datashift 0.40.3 → 0.40.4
This diff shows the content of publicly released package versions as published to one of the supported registries, i.e. the changes between the two versions exactly as they appear in the public registry. It is provided for informational purposes only.
- checksums.yaml +5 -5
- data/README.md +7 -2
- data/datashift.thor +28 -23
- data/lib/datashift.rb +6 -6
- data/lib/datashift/binder.rb +30 -11
- data/lib/datashift/configuration.rb +10 -2
- data/lib/datashift/core_ext/array.rb +7 -7
- data/lib/datashift/delimiters.rb +1 -1
- data/lib/datashift/doc_context.rb +1 -1
- data/lib/datashift/excel_base.rb +2 -2
- data/lib/datashift/exporters/csv_exporter.rb +0 -1
- data/lib/datashift/exporters/excel_exporter.rb +3 -4
- data/lib/datashift/file_definitions.rb +1 -3
- data/lib/datashift/inbound_data/method_binding.rb +5 -5
- data/lib/datashift/loaders/csv_loader.rb +2 -3
- data/lib/datashift/loaders/excel_loader.rb +8 -4
- data/lib/datashift/loaders/failure_data.rb +1 -3
- data/lib/datashift/loaders/loader_base.rb +2 -8
- data/lib/datashift/loaders/loader_factory.rb +6 -0
- data/lib/datashift/loaders/paperclip/attachment_loader.rb +1 -1
- data/lib/datashift/loaders/paperclip/datashift_paperclip.rb +2 -2
- data/lib/datashift/loaders/paperclip/image_loading.rb +2 -2
- data/lib/datashift/mapping/data_flow_schema.rb +40 -14
- data/lib/datashift/mapping/mapper_utils.rb +3 -3
- data/lib/datashift/model_methods/catalogue.rb +14 -14
- data/lib/datashift/model_methods/model_method.rb +5 -6
- data/lib/datashift/model_methods/operator.rb +1 -1
- data/lib/datashift/node_context.rb +2 -3
- data/lib/datashift/populators/has_many.rb +2 -2
- data/lib/datashift/populators/insistent_assignment.rb +4 -4
- data/lib/datashift/populators/populator.rb +21 -16
- data/lib/datashift/populators/populator_factory.rb +2 -4
- data/lib/datashift/querying.rb +4 -5
- data/lib/datashift/transformation/factory.rb +3 -3
- data/lib/datashift/version.rb +1 -1
- data/lib/generators/datashift/install_generator.rb +3 -3
- data/lib/generators/templates/datashift.rb +0 -4
- data/lib/tasks/config.thor +7 -9
- data/lib/tasks/export.thor +44 -45
- data/lib/tasks/generate.thor +43 -37
- data/lib/tasks/import.thor +20 -14
- data/lib/tasks/paperclip.thor +46 -48
- data/lib/tasks/thor_behaviour.rb +1 -1
- data/lib/tasks/to_convert_to_thor/db_tasks.rake +1 -3
- data/lib/tasks/tools.thor +37 -38
- metadata +45 -45
@@ -77,7 +77,7 @@ module DataShift
 
           begin
             context.process
-          rescue => x
+          rescue StandardError => x
             if doc_context.all_or_nothing?
               logger.error('Complete Row aborted - All or nothing set and Current Column failed.')
               logger.error(x.backtrace.first.inspect)
@@ -97,8 +97,7 @@ module DataShift
             raise ActiveRecord::Rollback # Don't actually create/upload to DB if we are doing dummy run
           end
         end # TRANSACTION N.B ActiveRecord::Rollback does not propagate outside of the containing transaction block
-
-    rescue => e
+    rescue StandardError => e
       puts "ERROR: CSV loading failed : #{e.inspect}"
       raise e
     ensure
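Editor's note: the dominant change in this release is mechanical - every bare `rescue` or `rescue => e` becomes an explicit `rescue StandardError` (these first two hunks line up with data/lib/datashift/loaders/csv_loader.rb in the file list above). Behaviour is unchanged, since a bare `rescue` already catches only `StandardError` descendants; this is the style RuboCop's `Style/RescueStandardError` cop enforces. A minimal illustration, with `risky_operation` and `handle` as hypothetical stand-ins:

```ruby
# Both forms catch ArgumentError (a StandardError subclass); neither would
# catch Exception-level errors such as SignalException or NoMemoryError.
def risky_operation
  raise ArgumentError, 'boom'
end

def handle(error)
  puts "caught #{error.class}: #{error.message}"
end

begin
  risky_operation
rescue => e                 # implicit: already limited to StandardError
  handle(e)
end

begin
  risky_operation
rescue StandardError => e   # explicit form adopted throughout 0.40.4
  handle(e)
end
```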
@@ -66,13 +66,18 @@ module DataShift
 
       @binder.bindings.each do |method_binding|
 
+        # TODO: - how does this get inserted - bind headers ?? ignore if no index
+        # #<DataShift::MethodBinding:0x0000000003e26280 @inbound_column=#<DataShift::InboundData::Column:0x0000000003e26258 @header=#<DataShift::Header:0x0000000003e26190 @source="audio", @presentation="audio">, @index=nil,
+        next if method_binding.index.nil?
+
         unless method_binding.valid?
           logger.warn("No binding was found for column (#{current_row_idx})")
           next
         end
 
         # If binding to a column, get the value from the cell (bindings can be to internal methods)
-
+        #
+        value = row[method_binding.index]
 
         context = doc_context.create_node_context(method_binding, current_row_idx, value)
 
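Editor's note: the substantive Excel loader change is the new guard - bindings whose header never matched a spreadsheet column carry a `nil` index, and `row[nil]` raises `TypeError`, so such bindings are now skipped before the cell lookup. A sketch of the effect (the hash-based `bindings` are hypothetical stand-ins for `DataShift::MethodBinding` objects):

```ruby
row = ['track 1', 'audio.mp3']

bindings = [
  { operator: 'title', index: 0 },
  { operator: 'audio', index: nil } # header never matched, so no column index
]

bindings.each do |binding|
  next if binding[:index].nil? # mirrors: next if method_binding.index.nil?

  value = row[binding[:index]]
  puts "#{binding[:operator]} => #{value.inspect}"
end
```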
@@ -82,7 +87,7 @@ module DataShift
 
         begin
           context.process
-        rescue
+        rescue StandardError
           if doc_context.all_or_nothing?
             logger.error('All or nothing set and Current Column failed so complete Row aborted')
             break
@@ -105,8 +110,7 @@ module DataShift
             raise ActiveRecord::Rollback # Don't actually create/upload to DB if we are doing dummy run
           end
         end # TRANSACTION N.B ActiveRecord::Rollback does not propagate outside of the containing transaction block
-
-    rescue => e
+    rescue StandardError => e
       puts "ERROR: Excel loading failed : #{e.inspect}"
       raise e
     ensure
@@ -31,9 +31,7 @@ module DataShift
 
     def destroy_failed_object
       if load_object
-        if load_object.respond_to?('destroy') && !load_object.new_record?
-          load_object.destroy
-        end
+        load_object.destroy if load_object.respond_to?('destroy') && !load_object.new_record?
       end
     end
 
@@ -89,11 +89,9 @@ module DataShift
 
       logger.info("Binding #{headers.size} inbound headers to #{load_object_class.name}")
 
-      @binder ||= DataShift::Binder.new
-
       begin
         binder.map_inbound_headers(load_object_class, headers)
-      rescue => e
+      rescue StandardError => e
        logger.error("Failed to map header row to set of database operators : #{e.inspect}")
        logger.error( e.backtrace )
        raise MappingDefinitionError, 'Failed to map header row to set of database operators'
@@ -141,14 +139,10 @@ module DataShift
 
       logger.info("Reading Datashift loader config from: #{yaml_file.inspect}")
 
-      data =
+      data = Configuration.parse_yaml(yaml_file)
 
       logger.info("Read Datashift config: #{data.inspect}")
 
-      @config.merge!(data['LoaderBase']) if data['LoaderBase']
-
-      @config.merge!(data[self.class.name]) if data[self.class.name]
-
       @binder ||= DataShift::Binder.new
 
       data_flow_schema = DataShift::DataFlowSchema.new
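Editor's note: config parsing in `LoaderBase` (and in `DataFlowSchema`, further down) is consolidated behind a new `Configuration.parse_yaml` helper, and the per-loader `@config.merge!` calls are dropped. The helper's body is not part of this diff; judging only from its call sites, a plausible sketch is ERB expansion followed by a safe YAML load - treat this as a guess, not the gem's code:

```ruby
require 'erb'
require 'yaml'

# Assumed shape of Configuration.parse_yaml, inferred from the call sites
# in this diff: expand ERB tags in the file, then parse the result as YAML.
module DataShift
  class Configuration
    def self.parse_yaml(yaml_file)
      YAML.safe_load(ERB.new(File.read(yaml_file)).result)
    end
  end
end
```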
@@ -16,6 +16,12 @@ module DataShift
 
   class Factory
 
+    # organizer
+
+    # include Interactor::Organizer
+
+    # organize DetermineLoaderKlass, BuildKlassCatalog, LoadConfig, ApplyConfig
+
     # Based on file_name find appropriate Loader
 
     # Currently supports :
@@ -80,7 +80,7 @@ module DataShift
       logger.info("Finding matching field/association [#{attach_to_field}] on class [#{attach_to_klass}]")
 
       binder.map_inbound_fields(attach_to_klass, attach_to_field)
-    rescue => e
+    rescue StandardError => e
       logger.error("Failed to map #{attach_to_field} to database operator : #{e.inspect}")
       logger.error( e.backtrace )
       raise MappingDefinitionError, 'Failed to map #{attach_to_field} to database operator'
@@ -40,7 +40,7 @@ module DataShift
 
       file = begin
         File.new(attachment_path, 'rb')
-      rescue => e
+      rescue StandardError => e
         logger.error(e.inspect)
         raise PathError, "ERROR : Failed to read image from #{attachment_path}"
       end
@@ -89,7 +89,7 @@ module DataShift
       logger.info("Create paperclip attachment on Class #{klass} - #{paperclip_attributes}")
 
       @attachment = klass.new(paperclip_attributes)
-    rescue => e
+    rescue StandardError => e
       logger.error(e.backtrace.first)
       raise CreateAttachmentFailed, "Failed [#{e.message}] - Creating Attachment [#{attachment_path}] on #{klass}"
     ensure
@@ -54,7 +54,7 @@ module DataShift
     # Set of file extensions ImageMagik can process so default glob
     # we use to find image files within directories
     def self.image_magik_glob
-      @im_glob ||= %w
+      @im_glob ||= %w[*.3FR *.AAI *.AI *.ART *.ARW *.AVI *.AVS *.BGR *.BGRA
                       *.BIE *.BMP *.BMP2 *.BMP3 *.BRF *.CAL *.CALS *.CANVAS
                       *.CIN *.CIP *.CLIP *.CMYK *.CMYKA *.CR2 *.CRW *.CUR *.CUT *.DCM *.DCR *.DCX
                       *.DDS *.DFONT *.DJVU *.DNG *.DOT *.DPS *.DPX
@@ -70,7 +70,7 @@ module DataShift
                       *.RGBA *.RGBO *.RLA *.RLE *.SCR *.SCT *.SFW *.SGI *.SR2 *.SRF
                       *.SUN *.SVG *.SVGZ *.TGA *.TIFF *.TIFF64 *.TILE *.TIM *.TTC *.TTF *.UBRL *.UIL *.UYVY *.VDA *.VICAR
                       *.VID *.VIFF *.VST *.WBMP *.WEBP *.WMF *.WMV *.WMZ *.WPG *.X3F
-                      *.XBM *.XC *.XCF *.XPM *.XPS *.XV *.XWD *.YCbCr *.YCbCrA *.YUV
+                      *.XBM *.XC *.XCF *.XPM *.XPS *.XV *.XWD *.YCbCr *.YCbCrA *.YUV]
       "{#{@im_glob.join(',')}}"
     end
   end
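Editor's note: the change here closes a multi-line `%w[...]` word-array literal; the joined result is a brace pattern suitable for `Dir.glob`. A cut-down illustration with just two of the extensions:

```ruby
# %w[...] builds an array of strings; joining with commas inside {} yields
# a glob that Dir.glob expands as alternatives.
glob = "{#{%w[*.JPG *.PNG].join(',')}}" # => "{*.JPG,*.PNG}"

Dir.glob(File.join('images', glob)) { |path| puts path }
```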
@@ -56,12 +56,12 @@ module DataShift
     end
 
     # @headers=
-
+    # <DataShift::Header:0x00000004bc37f8
     #  @presentation="status_str",
     #  @source="status_str">],
 
     def headers
-      # TODO fix doc context so it can be created 'empty' i.e without AR klass, and always has empty headers
+      # TODO: fix doc context so it can be created 'empty' i.e without AR klass, and always has empty headers
       @nodes.doc_context.try(:headers) || []
     end
 
@@ -83,11 +83,11 @@ module DataShift
       @nodes = create_node_collections(klass, doc_context: doc_context)
 
       klass_to_model_methods( klass ).each_with_index do |mm, i|
-        @nodes.headers.add(mm.operator)
+        @nodes.headers.add(mm.operator) # for a class, the header names, default to the operators (methods)
 
-        binding = MethodBinding.new(mm.operator,
+        binding = MethodBinding.new(mm.operator, mm, idx: i)
 
-        # TODO - do we really need to pass in the doc context when parent nodes already has it ?
+        # TODO: - do we really need to pass in the doc context when parent nodes already has it ?
         @nodes << DataShift::NodeContext.new(@nodes.doc_context, binding, i, nil)
       end
 
@@ -121,17 +121,19 @@ module DataShift
     #
     # See Config generation or lib/datashift/templates/import_export_config.erb for full syntax
    #
-
-
+    # Returns DataShift::NodeCollection
+    #
+    def prepare_from_file(yaml_file, locale_key = 'data_flow_schema')
+      @raw_data = ERB.new(File.read(yaml_file)).result
 
-      yaml =
+      yaml = Configuration.parse_yaml(yaml_file)
 
       prepare_from_yaml(yaml, locale_key)
     end
 
     def prepare_from_string(text, locale_key = 'data_flow_schema')
       @raw_data = text
-      yaml = YAML.
+      yaml = YAML.safe_load(raw_data)
 
       prepare_from_yaml(yaml, locale_key)
     end
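Editor's note: `prepare_from_string` now parses with `YAML.safe_load`, which refuses to instantiate arbitrary Ruby objects from the document, unlike the permissive `YAML.load`:

```ruby
require 'yaml'

YAML.safe_load("data_flow_schema:\n  Project: {}")
# => {"data_flow_schema"=>{"Project"=>{}}}

YAML.safe_load('--- !ruby/object:OpenStruct {}')
# raises Psych::DisallowedClass - arbitrary objects are not deserialised
```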
@@ -144,6 +146,23 @@ module DataShift
 
       locale_section = yaml[locale_key]
 
+      if(locale_section.key?('Global'))
+        global_nodes = locale_section.delete('Global')
+
+        [*global_nodes].each do |c|
+
+          # TODO: what is c ? a list or hash ?
+          # if DataShift::Configuration.call.respond_to #{c}=
+          # Set the global value e.g
+          # DataShift::Configuration.call.force_inclusion_of_columns = [:audio]
+        end
+      end
+
+      unless locale_section.keys.present?
+        logger.warn('No class related configuration found in YAML syntax- Nothing to process')
+        return DataShift::NodeCollection.new
+      end
+
       class_name = locale_section.keys.first
 
       klass = MapperUtils.class_from_string_or_raise(class_name)
@@ -189,7 +208,7 @@ module DataShift
 
       section = keyed_node.values.first || {}
 
-      # TODO - layout with heading is verbose for no benefit - defunct, simply node.source, node.presentation
+      # TODO: - layout with heading is verbose for no benefit - defunct, simply node.source, node.presentation
       source = section.fetch('heading', {}).fetch('source', nil)
 
       # Unless a specific source mentioned assume the node is the source
@@ -201,19 +220,26 @@ module DataShift
 
       if(section['operator'])
         # Find the domain model method details
+        # byebug
         model_method = model_method_mgr.search(section['operator'])
 
         unless model_method
           operator_type = section['operator_type'] || :method
 
-          # expect one of ModelMethod.supported_types_enum
+          # TODO: validate type ? guess we expect one of ModelMethod.supported_types_enum
           model_method = model_method_mgr.insert(section['operator'], operator_type)
+          # TODO: - This index could be hard coded by the user in YAML or we try to derive it from the headers
+          # byebug
+          method_binding = MethodBinding.new(source, model_method, idx: section['index'])
         end
-
-        method_binding = InternalMethodBinding.new(model_method)
       end
 
-
+      # Now ensure we bind source/header(and index) to the method tht performs assignment of inbound datum to the model
+      #
+      # TOFIX - This is a bug waiting to happen right ? i is not coming from the headers
+      # so chances are user hasn't made config indexed as per headers
+      # index could be hard coded by the user in YAML or we try to derive it from the headers via the binder ???
+      method_binding ||= MethodBinding.new(source, model_method, idx: i)
 
       node_context = DataShift::NodeContext.new(@nodes.doc_context, method_binding, i, nil)
 
@@ -46,8 +46,8 @@ module DataShift
     def self.class_from_string( str )
 
       MapperUtils.const_get_from_string(str.to_s) # Kernel.const_get(model)
-    rescue
-
+    rescue StandardError
+      nil
 
     end
 
@@ -58,7 +58,7 @@ module DataShift
       begin
         DataShift::MapperUtils.class_from_string(find_table)
       rescue LoadError
-      rescue
+      rescue StandardError
         nil
       end
     end
@@ -61,19 +61,17 @@ module DataShift
 
       # Find the has_one associations which can be populated via Model.has_one_name = OtherArModelObject
       if options[:reload] || has_one[klass].nil?
-        if Module.const_defined?(:Mongoid)
-
-
-
-
+        has_one[klass] = if Module.const_defined?(:Mongoid)
+                           klass.reflect_on_all_associations(:embeds_one).map { |i| i.name.to_s }
+                         else
+                           klass.reflect_on_all_associations(:has_one).map { |i| i.name.to_s }
+                         end
       end
 
       # Find the model's column associations which can be populated via xxxxxx= value
       # Note, not all reflections return method names in same style so we convert all to
       # the raw form i.e without the '=' for consistency
-      if options[:reload] || assignments[klass].nil?
-        build_assignments( klass, options[:instance_methods] )
-      end
+      build_assignments( klass, options[:instance_methods] ) if options[:reload] || assignments[klass].nil?
     end
 
     def self.clear
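Editor's note: two idioms in this `Catalogue` hunk - assigning `has_one[klass]` from the `if`/`else` expression itself (a Ruby conditional returns its branch's value), and collapsing a three-line `if` into a trailing modifier. A runnable toy version of the first:

```ruby
# `if` is an expression, so one assignment can own both branches - here
# choosing the reflection macro name depending on the ODM/ORM loaded.
association_macro = if Module.const_defined?(:Mongoid)
                      :embeds_one # Mongoid's single-document embed
                    else
                      :has_one    # the ActiveRecord association
                    end

puts association_macro # => has_one (unless Mongoid is loaded)
```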
@@ -155,7 +153,7 @@ module DataShift
     def build_assignments(klass, include_instance_methods)
       begin
         assignments[klass] = Catalogue.column_names(klass)
-      rescue => x
+      rescue StandardError => x
         raise DataShiftException, "Failed to process column_names for class #{klass} - #{x.message}"
       end
 
@@ -172,11 +170,13 @@ module DataShift
 
       assignments[klass].uniq!
 
-
-
-
-
-
+      unless Module.const_defined?(:Mongoid)
+        assignments[klass].each do |assign|
+          column_types[klass] ||= {}
+          column_def = klass.columns.find { |col| col.name == assign }
+          column_types[klass].merge!( assign => column_def) if column_def
+        end
+      end
     end
 
     def catalogued
@@ -4,7 +4,7 @@
 # License::   MIT
 #
 # Details::   This class holds info on a single Method callable on a domain Model
-#             By holding information on the Type inbound data can be manipulated
+#             By holding information on the Type, inbound data can be manipulated
 #             into the right format for the style of operator; simple assignment,
 #             appending to an association collection or a method call
 #
@@ -17,7 +17,7 @@ module DataShift
     include DataShift::Logging
 
     def self.association_types_enum
-      @assoc_type_enum ||= [
+      @assoc_type_enum ||= %i[belongs_to has_one has_many]
       @assoc_type_enum
     end
 
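Editor's note: several enum methods in this release swap bracketed literals for `%i[...]`, the symbol-array shorthand; combined with `||=` the list is built once and memoized:

```ruby
# %i[...] is exactly equivalent to the long-hand symbol array.
%i[belongs_to has_one has_many] == [:belongs_to, :has_one, :has_many] # => true

def association_types_enum
  # built on first call, served from the instance variable thereafter
  @assoc_type_enum ||= %i[belongs_to has_one has_many]
end
```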
@@ -45,7 +45,7 @@ module DataShift
 
       # Note : Not all assignments will currently have a column type, for example
       # those that are derived from a delegate_belongs_to
-      keys =
+      keys = Module.const_defined?(:Mongoid) ? klass.fields.keys : klass.columns.map(&:name)
       @connection_adapter_column = keys.find { |col| col == operator } if connection_adapter_column.nil?
 
       @connection_adapter_column = DataShift::ModelMethods::Catalogue.column_type_for(klass, operator) if connection_adapter_column.nil?
@@ -144,12 +144,11 @@ module DataShift
 
       if result.nil?
         begin
-
           first = klass.to_s.split('::').first
           logger.debug "Trying to find operator class with Parent Namespace #{first}"
 
           result = MapperUtils.const_get_from_string("#{first}::#{operator.classify}")
-        rescue => e
+        rescue StandardError => e
           logger.error("Failed to derive Class for #{operator} (#{@operator_type} - #{e.inspect}")
         end
       end
@@ -159,7 +158,7 @@ module DataShift
     elsif connection_adapter_column
       begin
         Kernel.const_get(connection_adapter_column.type.to_s.classify)
-      rescue
+      rescue StandardError
         nil
       end
     end
@@ -18,7 +18,7 @@ module DataShift
     # N.B these are in priority order ie. often prefer to process assignments first, then associations
     #
     def self.supported_types_enum
-      @type_enum ||= [
+      @type_enum ||= %i[assignment enum belongs_to has_one has_many method paperclip]
       @type_enum
     end
 
@@ -44,8 +44,7 @@ module DataShift
 
     def process
       populator.prepare_and_assign(self, doc_context.load_object, data)
-    rescue => x
-
+    rescue StandardError => x
       failed = FailureData.new( doc_context.load_object, self, x.message)
 
       failed.error_messages << "Failed to process node : #{method_binding.pp}"
@@ -61,7 +60,7 @@ module DataShift
   class EmptyContext < NodeContext
 
     def initialize
-      super(NilClass, DataShift::NoMethodBinding.new, -1
+      super(NilClass, DataShift::NoMethodBinding.new, [], idx: -1)
     end
   end
 
@@ -80,7 +80,7 @@ module DataShift
       find_by_values.each do |v|
         begin
           found_values << klass.where(field => v).first_or_create
-        rescue => e
+        rescue StandardError => e
           logger.error(e.inspect)
           logger.error("Failed to find or create #{klass} where #{field} => #{v}")
           # TODO: some way to define if this is a fatal error or not ?
@@ -100,7 +100,7 @@ module DataShift
 
       begin
         load_object.send(operator) << found_values
-      rescue => e
+      rescue StandardError => e
         logger.error e.inspect
         logger.error "Cannot assign #{found_values.inspect} to has_many [#{operator}] "
       end
@@ -19,13 +19,13 @@ module DataShift
     extend DataShift::Logging
 
     def self.insistent_method_list
-      @insistent_method_list ||= [
+      @insistent_method_list ||= %i[to_s downcase to_i to_f to_b]
     end
 
     # When looking up an association, when no field provided, try each of these in turn till a match
     # i.e find_by_name, find_by_title, find_by_id
     def self.insistent_find_by_list
-      @insistent_find_by_list ||= [
+      @insistent_find_by_list ||= %i[name title id]
     end
 
     def self.call(record, value, operator)
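Editor's note: these two lists feed `InsistentAssignment`'s brute-force strategy - try each coercion in turn, swallowing `StandardError`, until a setter accepts one. A minimal sketch of the idea (`insistent_assign` is a hypothetical distillation, not the gem's method; note `to_b` from the real list is not core Ruby and relies on an extension datashift loads):

```ruby
COERCIONS = %i[to_s downcase to_i to_f].freeze

# Try each coercion until the setter stops raising; true on success.
def insistent_assign(record, value, setter)
  COERCIONS.each do |coercion|
    record.public_send(setter, value.public_send(coercion))
    return true
  rescue StandardError
    next # this coercion didn't fit - try the next one
  end
  false
end
```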
@@ -49,7 +49,7 @@ module DataShift
     InsistentAssignment.insistent_method_list.each do |f|
       begin
         return if(attempt(record, value.send(f), method))
-      rescue
+      rescue StandardError
       end
     end
   end
@@ -64,7 +64,7 @@ module DataShift
     def attempt(record, value, operator)
       begin
         record.send(operator, value)
-      rescue
+      rescue StandardError
         logger.debug("Brute forced failed for [#{operator}, #{value}]")
         return false
       end