mass_record 0.0.3.2 → 0.0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/mass_record.rb +98 -26
- data/lib/mass_record/version.rb +1 -1
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 519bc877db9efb22dd6802ab3a70f47c137cf453
+  data.tar.gz: d97a25596664645111adc86a81177e6f1d95b10e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 81556aec165ea40b8da20faaafba2dc47606b74405b586de5f68cb423c4976a8143383e38013e556a83bd18b8a364c5ae7dc52a3c57fc17ab769c9fcf8ce5278
+  data.tar.gz: 4920226f001443c9cb5ae72207955b5511f8d82ab746956fae0c489283b078343e5452fb8bcf649cf9b2210d18ac42cdb93488e79157cfbe84cf044210abdbfb
data/lib/mass_record.rb
CHANGED
@@ -1,22 +1,28 @@
 require "mass_record/engine"

 module MassRecord
-  mattr_accessor :path, :folder_path, :database_connection
+  mattr_accessor :path, :folder_path, :database_connection, :logger, :individual_count, :mass_count
   self.path = {}
   self.folder_path = "tmp/#{Rails.env}"
   self.path[:queries] = "tmp/#{Rails.env}"
   self.path[:queued_queries] = "tmp/#{Rails.env}"
   self.path[:errored_queries] = "tmp/#{Rails.env}"
   self.path[:completed_queries] = "tmp/#{Rails.env}"
-
+  logger = Logger.new STDOUT

   module Actions
+    include ActionView::Helpers::TextHelper
+    mattr_accessor :individual_count, :mass_count
+    attr_accessor :individual_count, :mass_count
     path = {}
     folder_path = "tmp/#{Rails.env}"
     path[:queries] = "#{folder_path}/queries"
     path[:queued_queries] = "#{path[:queries]}/queued"
     path[:errored_queries] = "#{path[:queries]}/errored"
     path[:completed_queries] = "#{path[:queries]}/completed"
+    logger = Logger.new STDOUT
+    self.individual_count = 0
+    self.mass_count = 0

     class IndividualError < Exception
       attr_accessor :operation,:table,:json_object,:original_exception,:backtrace,:backtrace_locations,:cause,:exception,:message
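For context, the new module-level accessors can be configured from the host application. A minimal sketch (not part of the gem), assuming a hypothetical Rails initializer file:

    # config/initializers/mass_record.rb -- hypothetical host-app initializer
    require 'logger'

    # Send the engine's log output to a dedicated file instead of STDOUT
    MassRecord.logger = Logger.new(Rails.root.join('log', 'mass_record.log').to_s)
    MassRecord.logger.level = Logger::INFO

    # The counters introduced in this release track how many mass queries and
    # per-object recovery queries were executed during a run
    MassRecord.mass_count = 0
    MassRecord.individual_count = 0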
@@ -81,6 +87,7 @@ module MassRecord
       errored:path[:errored_queries],
       completed:path[:completed_queries]
     },file_tag:Time.now.strftime("%Y%m%d%H%M%S%L").to_s
+    self.individual_count = self.mass_count = 0

     files = Dir.foreach(folder[:queued]).collect{|x| x}.keep_if{|y|y=~/\.json$/i}
     json_objects = []
@@ -98,13 +105,12 @@ module MassRecord

 # validate all objects
 validation_results = mass_validate json_objects
+logger.debug "#{validation_results[:passed_orders].count} valid objects of #{json_objects.count} total objects".black.on_white
 json_objects = validation_results[:passed_orders]

 # get all operations and tables in use
 operations = json_objects.collect{|x| x[key[:operation]].to_sym}.to_set.to_a
-
-# open database connection
-database_connection = ActiveRecord::Base.connection
+logger.debug "Operations: #{operations.pretty_inspect}".black.on_white

 # construct mass queries
 errors = {}
@@ -121,22 +127,60 @@ module MassRecord
 end
 end

-#
-
-# move to appropriate folder and remove '.processing' from the filename
+# Collect mass errors and the associated objects
 errors_present = errors.any?{|op,tables| tables.has_key? :run_time or tables.any?{|table,col_sets| !col_sets.blank?}}
 errored_objects = collect_errored_objects found_in:errors, from:json_objects, key:key, synonyms:synonyms if errors_present

+# Retry objects from the failed queries on an individual query basis
 individual_errors = errors_present ? (query_per_object errored_objects, key:key, synonyms:synonyms) : []
-database_connection.close
-
-files = Dir.foreach(folder[:queued]).collect{|x| x}.keep_if{|y|y=~/\.json\.processing$/i}
-files.each{|x| File.rename "#{folder[:queued]}/#{x}","#{errors_present ? folder[:errored] : folder[:completed]}/group_#{file_tag}_#{x.gsub /\.processing$/,''}"}

+# Collect individual errors and their associated objects with the option for custom handling
+individually_errored_objects = collect_individually_errored_objects from:errored_objects, based_on:individual_errors, key:key
 individual_errors += (collect_run_time_errors found_in:errors) + validation_results[:failed_orders]
+default_error_handling = handle_individual_errors_callback errors:individual_errors, errored_objects:individually_errored_objects, all_objects:json_objects
+
+# Save failed objects, archive all objects, and log out a summary
+if default_error_handling
+  # Save a new file with just the errored objects in the errored folder
+  # (which will be all the objects if there is not a 1 to 1 ratio between the errors and errored objects)
+  # THEN save a new file with ALL the objects in the completed folder
+  if json_objects.count > 0
+    if individual_errors.count == individually_errored_objects.count
+      File.open("#{folder[:errored]}/errored_only_#{file_tag}.json",'w'){|f| f.write individually_errored_objects.to_json} if individual_errors.count > 0
+      File.open("#{folder[:completed]}/#{file_tag}.json",'w'){|f| f.write json_objects.to_json}
+    else
+      File.open("#{folder[:errored]}/all_#{file_tag}.json",'w'){|f| f.write json_objects.to_json}
+    end
+  end
+
+  # Delete all the original files
+  file_names = files.collect{|x| "#{folder[:queued]}/#{x}.processing"}
+  File.delete(*file_names)
+
+  # Log out a summary of what happened
+  logger.info "\nProcessed #{pluralize((json_objects.count),'object')} with #{pluralize((individual_errors.count),'error')}".black.on_white
+  logger.info "\tMass Queries:\t\t#{self.mass_count} for #{pluralize((json_objects.count - errored_objects.count),'object')}\n\tRecovery Queries:\t#{self.individual_count} for #{pluralize(errored_objects.count,'object')}\n\tErrors:\t\t\t#{individual_errors.count}".black.on_white if individual_errors.count > 0 or logger.debug?
+  individual_errors.each_with_index{|x,i| logger.info "\t\t(#{i}) #{x.to_s[0..90]}...".black.on_white} if individual_errors.count > 0 or logger.debug?
+end
 return individual_errors
 end

+def handle_individual_errors_callback errors:[], errored_objects:[], all_objects:[]
+  # TODO: must be manually overidden. Assumes a true return value means to use the engines default error handling and logging, and a false return value means to skip all subsequent actions
+  return true
+end
+
+def collect_individually_errored_objects from:[], based_on:[], key:{}
+  individuals = []
+  based_on.each do |error|
+    if error.is_a? IndividualError and !error.json_object.blank?
+      errored_object = from.select{|object| object[key[:table]] === error.table and object[key[:operation]] === error.operation and object[key[:object]] === error.json_object }.first
+      individuals << errored_object unless errored_object.blank?
+    end
+  end
+  return individuals
+end
+
 def collect_run_time_errors found_in:{}, loop_limit:10
   return [] if found_in.blank?
   run_time_errors = []
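The new handle_individual_errors_callback is a hook: returning true keeps the engine's default error handling and logging, returning false skips the subsequent file writes and summary. A rough sketch of how a host application might override it, assuming a placeholder ErrorMailer:

    # Hypothetical override in the host application; ErrorMailer is a placeholder name
    module MassRecord
      module Actions
        def handle_individual_errors_callback errors:[], errored_objects:[], all_objects:[]
          # Custom handling: notify someone about the objects that could not be saved
          ErrorMailer.mass_record_failures(errors, errored_objects).deliver if errors.any?

          # true  => also run the engine's default error handling and logging
          # false => skip all subsequent actions
          true
        end
      end
    end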
@@ -181,6 +225,7 @@ module MassRecord
 end

 def query_per_object objects, key:{}, synonyms:{}
+  logger.info "Executing #{objects.count} individual queries...".black.on_white
   # get all operations and tables in use
   operations = objects.collect{|x| x[key[:operation]].to_sym}.to_set.to_a

@@ -304,7 +349,8 @@ module MassRecord
 h = hash.clone # use a copy of hash, so it doesn't change the original data

 # assemble an individual query
-im = Arel::InsertManager.new(ActiveRecord::Base)
+# im = Arel::InsertManager.new(ActiveRecord::Base)
+im = Arel::InsertManager.new(model)
 unless id_column_name.is_a? Array # don't modify the id fields if there are concatenated primary keys
   database_column = model.columns.select{|x| x.name == id_column_name}.first
   h.delete id_column_name if h[id_column_name].blank? or (database_column.methods.include? :extra and database_column.extra == 'auto_increment')
@@ -329,7 +375,8 @@ module MassRecord
 h = convert_to_db_format h, model:model, created_at:created_at, updated_at:updated_at

 # assemble an individual query
-um = Arel::UpdateManager.new(ActiveRecord::Base)
+# um = Arel::UpdateManager.new(ActiveRecord::Base)
+um = Arel::UpdateManager.new(model)
 um.where(t[id_column_name.to_sym].eq(h[id_column_name])) unless id_column_name.is_a? Array
 id_column_name.each{|key| um.where t[key.to_sym].eq(h[key])} if id_column_name.is_a? Array
 um.table(t)
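Both Arel managers are now built from the model instead of ActiveRecord::Base. A small sketch of the resulting call pattern, assuming a hypothetical Order model and the Arel API bundled with Rails 4.x:

    t  = Order.arel_table
    im = Arel::InsertManager.new(Order)   # previously Arel::InsertManager.new(ActiveRecord::Base)
    im.into(t)
    im.insert([[t[:status], 'queued']])
    im.to_sql   # => roughly: INSERT INTO "orders" ("status") VALUES ('queued')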
@@ -345,6 +392,8 @@ module MassRecord
 def update hashes, into:nil
   begin
     return false if hashes.blank? or into.blank?
+
+    logger.debug "Update #{into.to_s}>".black.on_white
     hashes = [hashes] unless hashes.is_a? Array
     model = get_model from:into

@@ -355,8 +404,11 @@ module MassRecord

 begin
   query sql, connection:model
+  logger << ".".black.on_white if logger.debug?
+  self.individual_count += 1
 rescue Exception => e
-
+  logger.debug e.message
+  logger.info e.message.to_s[0..1000]
   errors << IndividualError.new(e,table:into,operation:"update",json_object:hash)
 end
 end
@@ -369,6 +421,8 @@ module MassRecord
 def insert hashes, into:nil
   begin
     return false if hashes.blank? or into.blank?
+
+    logger.debug "Insert #{into.to_s}>".black.on_white
     hashes = [hashes] unless hashes.is_a? Array
     model = get_model from:into

@@ -376,11 +430,13 @@ module MassRecord
 # create an array of single insert queries
 hashes.each do |hash|
   sql = sql_for_insert hash, into:model
-
   begin
     query sql, connection:model
+    self.individual_count += 1
+    logger << ".".black.on_white if logger.debug?
   rescue Exception => e
-
+    logger.debug e.message
+    logger << 'E'.black.on_white if logger.info?
     errors << IndividualError.new(e,table:into,operation:"insert",json_object:hash)
   end
 end
@@ -401,6 +457,7 @@ module MassRecord

 errors = {}
 tables.each do |table|
+  # logger.info "Table: #{table}".black.on_white
   hashes = json_objects.select{|o| o[key[:table]] == table}.collect{|x| x[key[:object]]}

   errors[table.to_sym] = {} unless errors[table.to_sym].is_a? Hash
@@ -420,6 +477,7 @@ module MassRecord

 errors = {}
 tables.each do |table|
+  # logger.info "Table: #{table}".black.on_white
   # sort the hashes by operation type
   sorted_hashes = sort_save_operations from:json_objects, for_table:table, key:key

@@ -466,7 +524,7 @@ module MassRecord
 update = "UPDATE #{model.table_name} SET "
 where_clauses = []
 id_column_name.each do |key|
-  value_set = ids.collect{|id_set|
+  value_set = ids.collect{|id_set| model.connection.quote(model.connection.type_cast(id_set[key], model.column_types[key]))}
   where_clauses << "(#{model.table_name}.#{key} in (#{value_set.join ','}))"
 end
 where = "WHERE #{where_clauses.join ' and '}"
@@ -481,9 +539,9 @@ module MassRecord
 set_fragments[k] = [] unless set_fragments.has_key? k and set_fragments[k].is_a? Array
 case_fragments = []
 id_column_name.each do |key|
-  case_fragments << "#{
+  case_fragments << "#{model.connection.quote_column_name key} = #{model.connection.quote hash[key]}"
 end
-set_fragments[k] << "WHEN (#{case_fragments.join ' and '}) THEN #{
+set_fragments[k] << "WHEN (#{case_fragments.join ' and '}) THEN #{model.connection.quote v}"
 end
 end
 end
@@ -501,7 +559,7 @@ module MassRecord
 hash.each do |k,v|
   if k != id_column_name
     set_fragments[k] = [] unless set_fragments.has_key? k and set_fragments[k].is_a? Array
-    set_fragments[k] << "WHEN #{
+    set_fragments[k] << "WHEN #{model.connection.quote hash[id_column_name]} THEN #{model.connection.quote v}"
   end
 end
 end
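These fragments feed one bulk UPDATE per column set, with every literal now escaped through the adapter via model.connection.quote. Roughly the shape of the statement they build, using a made-up orders table:

    # Illustrative only; table, columns, and values are hypothetical
    #
    #   UPDATE orders SET
    #     status = CASE id WHEN 1 THEN 'shipped' WHEN 2 THEN 'cancelled' END,
    #     total  = CASE id WHEN 1 THEN 19.99 WHEN 2 THEN 0.0 END
    #   WHERE (orders.id in (1,2))
    #
    # Escaping now goes through the connection adapter, e.g.:
    Order.connection.quote("O'Brien")          # safely escaped string literal
    Order.connection.quote_column_name("id")   # quoting style depends on the adapter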
@@ -517,8 +575,10 @@ module MassRecord

 begin
   query "#{update} #{set_columns.join ', '} #{where}", connection:model
+  self.mass_count += 1
 rescue Exception => e
-
+  logger.debug e.message
+  logger.info e.message.to_s[0..1000]
   errors[column_set] = e
 end
 end
@@ -536,6 +596,7 @@ module MassRecord

 errors = {}
 tables.each do |table|
+  # logger.info "Table: #{table}".black.on_white
   hashes = json_objects.select{|o| o[key[:table]] == table}.collect{|x| x[key[:object]]}

   errors[table.to_sym] = {} unless errors[table.to_sym].is_a? Hash
@@ -557,7 +618,9 @@ module MassRecord
 model = get_model from:into
 concentrated_queries = {}

+logger.debug "#{into}: Parsing #{hashes.count} hashes into a single query>".black.on_white
 hashes.each do |hash|
+  logger << ".".black.on_white if logger.debug?
   original_key_set = hash.keys.sort
   sql = sql_for_insert hash, into:model

@@ -575,15 +638,21 @@ module MassRecord

 # reparse the queries and execute them
 concentrated_queries.each do |column_set,clauses|
+  final_query = "#{clauses[:into]} VALUES #{clauses[:values].join(", ")}"
   begin
-
+    # puts "press enter to continue...:" if Rails.env = 'development' and defined?(Rails::Console) and logger.debug?
+    # gets if Rails.env = 'development' and defined?(Rails::Console) and logger.debug?
+    query final_query, connection:model
+    self.mass_count += 1
   rescue Exception => e
-
+    logger.debug e.message
+    logger.info e.message.to_s[0..1000]
     errors[column_set] = e
   end
 end
 return errors
 rescue Exception => e
+logger.error e.message
 return (defined? errors) ? (errors.merge!({run_time:e})) : {run_time:e}
 end
 end
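The grouped rows are stitched into a single multi-row INSERT before execution. A minimal sketch of the final_query assembly, with hypothetical clause contents standing in for what the loop above accumulates:

    clauses = {
      into:   "INSERT INTO orders (id, status, total)",   # made-up example of clauses[:into]
      values: ["(1,'queued',19.99)", "(2,'shipped',5.00)"] # made-up example of clauses[:values]
    }
    final_query = "#{clauses[:into]} VALUES #{clauses[:values].join(", ")}"
    # => "INSERT INTO orders (id, status, total) VALUES (1,'queued',19.99), (2,'shipped',5.00)"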
@@ -601,13 +670,16 @@ module MassRecord

 # convert to correct database type
 begin
-  v =
+  v = model.connection.type_cast v, model.column_types[k]
+  v = model.connection.quote_string v if v.is_a? String
 rescue Exception => e # If it is a text field, automatically yamlize it if there is a non text type passed in (just like normal active record saves)
-  v =
+  v = model.connection.type_cast v.to_yaml, model.column_types[k] if e.is_a? TypeError and model.column_types[k].type == :text
 end
 json_object[k] = v
 end

+#TODO: handle if updated_at field is not present in the hash, but is in the model (so that all transactions have an accurate updated_at)
+
 return json_object
 end

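convert_to_db_format now casts each value through the connection adapter and falls back to YAML for text columns, as the inline comment notes. A sketch of that path, assuming the Rails 4.1-era connection API used above and a hypothetical Order model with a text column named notes:

    column = Order.column_types['notes']
    value  = { flagged: true }            # non-text value headed for a text column

    begin
      value = Order.connection.type_cast(value, column)
      value = Order.connection.quote_string(value) if value.is_a? String
    rescue TypeError
      # yamlize the value, mirroring the rescue branch in the diff for text columns
      value = Order.connection.type_cast(value.to_yaml, column)
    end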
data/lib/mass_record/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: mass_record
 version: !ruby/object:Gem::Version
-  version: 0.0.3.2
+  version: 0.0.4.0
 platform: ruby
 authors:
 - Nathan Hanna
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2014-
+date: 2014-12-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails
@@ -129,7 +129,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
       version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.4.
+rubygems_version: 2.4.4
 signing_key:
 specification_version: 4
 summary: A Ruby on Rails library to help with mass database operations like insert,
|