activerecord-import 0.19.0 → 1.0.0

Files changed (43)
  1. checksums.yaml +5 -5
  2. data/.travis.yml +22 -12
  3. data/CHANGELOG.md +166 -0
  4. data/Gemfile +13 -10
  5. data/README.markdown +548 -5
  6. data/Rakefile +2 -1
  7. data/benchmarks/lib/cli_parser.rb +2 -1
  8. data/gemfiles/5.1.gemfile +1 -0
  9. data/gemfiles/5.2.gemfile +2 -0
  10. data/lib/activerecord-import/adapters/abstract_adapter.rb +2 -2
  11. data/lib/activerecord-import/adapters/mysql_adapter.rb +16 -10
  12. data/lib/activerecord-import/adapters/postgresql_adapter.rb +59 -15
  13. data/lib/activerecord-import/adapters/sqlite3_adapter.rb +126 -3
  14. data/lib/activerecord-import/base.rb +4 -6
  15. data/lib/activerecord-import/import.rb +384 -126
  16. data/lib/activerecord-import/synchronize.rb +1 -1
  17. data/lib/activerecord-import/value_sets_parser.rb +14 -0
  18. data/lib/activerecord-import/version.rb +1 -1
  19. data/lib/activerecord-import.rb +2 -15
  20. data/test/adapters/makara_postgis.rb +1 -0
  21. data/test/import_test.rb +148 -14
  22. data/test/makara_postgis/import_test.rb +8 -0
  23. data/test/models/account.rb +3 -0
  24. data/test/models/bike_maker.rb +7 -0
  25. data/test/models/topic.rb +10 -0
  26. data/test/models/user.rb +3 -0
  27. data/test/models/user_token.rb +4 -0
  28. data/test/schema/generic_schema.rb +20 -0
  29. data/test/schema/mysql2_schema.rb +19 -0
  30. data/test/schema/postgresql_schema.rb +1 -0
  31. data/test/schema/sqlite3_schema.rb +13 -0
  32. data/test/support/factories.rb +9 -8
  33. data/test/support/generate.rb +6 -6
  34. data/test/support/mysql/import_examples.rb +14 -2
  35. data/test/support/postgresql/import_examples.rb +142 -0
  36. data/test/support/shared_examples/on_duplicate_key_update.rb +252 -1
  37. data/test/support/shared_examples/recursive_import.rb +41 -11
  38. data/test/support/sqlite3/import_examples.rb +187 -10
  39. data/test/synchronize_test.rb +8 -0
  40. data/test/test_helper.rb +9 -1
  41. data/test/value_sets_bytes_parser_test.rb +13 -2
  42. metadata +20 -5
  43. data/test/schema/mysql_schema.rb +0 -16
@@ -24,6 +24,36 @@ def should_support_postgresql_import_functionality
  end
  end

+ context "setting attributes and marking clean" do
+ let(:topic) { Build(:topics) }
+
+ setup { Topic.import([topic]) }
+
+ it "assigns ids" do
+ assert topic.id.present?
+ end
+
+ it "marks models as clean" do
+ assert !topic.changed?
+ end
+
+ if ENV['AR_VERSION'].to_f > 4.1
+ it "moves the dirty changes to previous_changes" do
+ assert topic.previous_changes.present?
+ end
+ end
+
+ it "marks models as persisted" do
+ assert !topic.new_record?
+ assert topic.persisted?
+ end
+
+ it "assigns timestamps" do
+ assert topic.created_at.present?
+ assert topic.updated_at.present?
+ end
+ end
+
  describe "with query cache enabled" do
  setup do
  unless ActiveRecord::Base.connection.query_cache_enabled
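The block above pins down what `#import` now does to the model instances it is given: ids and timestamps are written back, dirty tracking is reset, and the records are marked persisted. A minimal sketch of that behaviour from the caller's side (attribute values are illustrative; `Topic` comes from the test schema):

    topics = [Topic.new(title: "Rework", author_name: "David"),
              Topic.new(title: "Eloquent Ruby", author_name: "Russ")]

    Topic.import(topics)       # one multi-row INSERT

    topics.first.id            # assigned from the database
    topics.first.persisted?    # => true
    topics.first.changed?      # => false; on AR > 4.1 the changes move to previous_changes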
@@ -61,6 +91,76 @@ def should_support_postgresql_import_functionality
  assert_equal [], Book.import(books, no_returning: true).ids
  end
  end
+
+ describe "returning" do
+ let(:books) { [Book.new(author_name: "King", title: "It")] }
+ let(:result) { Book.import(books, returning: %w(author_name title)) }
+ let(:book_id) do
+ if RUBY_PLATFORM == 'java' || ENV['AR_VERSION'].to_i >= 5.0
+ books.first.id
+ else
+ books.first.id.to_s
+ end
+ end
+
+ it "creates records" do
+ assert_difference("Book.count", +1) { result }
+ end
+
+ it "returns ids" do
+ result
+ assert_equal [book_id], result.ids
+ end
+
+ it "returns specified columns" do
+ assert_equal [%w(King It)], result.results
+ end
+
+ context "when primary key and returning overlap" do
+ let(:result) { Book.import(books, returning: %w(id title)) }
+
+ setup { result }
+
+ it "returns ids" do
+ assert_equal [book_id], result.ids
+ end
+
+ it "returns specified columns" do
+ assert_equal [[book_id, 'It']], result.results
+ end
+ end
+
+ context "setting model attributes" do
+ let(:code) { 'abc' }
+ let(:discount) { 0.10 }
+ let(:original_promotion) do
+ Promotion.new(code: code, discount: discount)
+ end
+ let(:updated_promotion) do
+ Promotion.new(code: code, description: 'ABC discount')
+ end
+ let(:returning_columns) { %w(discount) }
+
+ setup do
+ Promotion.import([original_promotion])
+ Promotion.import([updated_promotion],
+ on_duplicate_key_update: { conflict_target: %i(code), columns: %i(description) },
+ returning: returning_columns)
+ end
+
+ it "sets model attributes" do
+ assert_equal updated_promotion.discount, discount
+ end
+
+ context "returning multiple columns" do
+ let(:returning_columns) { %w(discount description) }
+
+ it "sets model attributes" do
+ assert_equal updated_promotion.discount, discount
+ end
+ end
+ end
+ end
  end

  if ENV['AR_VERSION'].to_f >= 4.0
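The `returning:` examples above ride on PostgreSQL's `INSERT ... RETURNING`. A rough sketch of how the option is used, following the same test models (values are illustrative):

    books  = [Book.new(author_name: "King", title: "It")]
    result = Book.import(books, returning: %w(author_name title))

    result.ids               # primary keys of the inserted rows
    result.results           # [["King", "It"]] – one entry per returned row
    result.failed_instances  # models rejected by validation, if any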
@@ -135,6 +235,17 @@ def should_support_postgresql_import_functionality
  end
  end
  end
+
+ describe "with binary field" do
+ let(:binary_value) { "\xE0'c\xB2\xB0\xB3Bh\\\xC2M\xB1m\\I\xC4r".force_encoding('ASCII-8BIT') }
+ it "imports the correct values for binary fields" do
+ alarms = [Alarm.new(device_id: 1, alarm_type: 1, status: 1, secret_key: binary_value)]
+ assert_difference "Alarm.count", +1 do
+ Alarm.import alarms
+ end
+ assert_equal(binary_value, Alarm.first.secret_key)
+ end
+ end
  end

  def should_support_postgresql_upsert_functionality
@@ -190,6 +301,30 @@ def should_support_postgresql_upsert_functionality
  end

  context "using a hash" do
+ context "with :columns :all" do
+ let(:columns) { %w( id title author_name author_email_address parent_id ) }
+ let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] }
+
+ macro(:perform_import) do |*opts|
+ Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: :all }, validate: false)
+ end
+
+ setup do
+ values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]]
+ Topic.import columns + ['replies_count'], values, validate: false
+ end
+
+ it "should update all specified columns" do
+ perform_import
+ updated_topic = Topic.find(99)
+ assert_equal 'Book - 2nd Edition', updated_topic.title
+ assert_equal 'Jane Doe', updated_topic.author_name
+ assert_equal 'janedoe@example.com', updated_topic.author_email_address
+ assert_equal 57, updated_topic.parent_id
+ assert_equal 3, updated_topic.replies_count
+ end
+ end
+
  context "with :columns a hash" do
  let(:columns) { %w( id title author_name author_email_address parent_id ) }
  let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] }
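For reference, the hash form exercised in this context combines a conflict target with `columns: :all`, i.e. a PostgreSQL `ON CONFLICT (id) DO UPDATE` that overwrites every importable column. A condensed sketch using the same illustrative data:

    columns = %w(id title author_name author_email_address parent_id)
    values  = [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]]

    Topic.import columns, values,
                 validate: false,
                 on_duplicate_key_update: {
                   conflict_target: [:id],  # ON CONFLICT (id)
                   columns: :all            # DO UPDATE SET ... for every column
                 }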
@@ -204,6 +339,13 @@ def should_support_postgresql_upsert_functionality
  @topic = Topic.find 99
  end

+ it "should not modify the passed in :on_duplicate_key_update columns array" do
+ assert_nothing_raised do
+ columns = %w(title author_name).freeze
+ Topic.import columns, [%w(foo, bar)], { on_duplicate_key_update: { columns: columns }.freeze }.freeze
+ end
+ end
+
  context "using string hash map" do
  let(:update_columns) { { "title" => "title", "author_email_address" => "author_email_address", "parent_id" => "parent_id" } }
  should_support_on_duplicate_key_update
@@ -5,9 +5,240 @@ def should_support_basic_on_duplicate_key_update
  macro(:perform_import) { raise "supply your own #perform_import in a context below" }
  macro(:updated_topic) { Topic.find(@topic.id) }

+ context "with lock_version upsert" do
+ describe 'optimistic lock' do
+ it 'lock_version upsert after on_duplcate_key_update by model' do
+ users = [
+ User.new(name: 'Salomon'),
+ User.new(name: 'Nathan')
+ ]
+ User.import(users)
+ assert User.count == users.length
+ User.all.each do |user|
+ assert_equal 0, user.lock_version
+ end
+ updated_users = User.all.map do |user|
+ user.name += ' Rothschild'
+ user
+ end
+ User.import(updated_users, on_duplicate_key_update: [:name])
+ assert User.count == updated_users.length
+ User.all.each_with_index do |user, i|
+ assert_equal user.name, users[i].name + ' Rothschild'
+ assert_equal 1, user.lock_version
+ end
+ end
+
+ it 'lock_version upsert after on_duplcate_key_update by array' do
+ users = [
+ User.new(name: 'Salomon'),
+ User.new(name: 'Nathan')
+ ]
+ User.import(users)
+ assert User.count == users.length
+ User.all.each do |user|
+ assert_equal 0, user.lock_version
+ end
+
+ columns = [:id, :name]
+ updated_values = User.all.map do |user|
+ user.name += ' Rothschild'
+ [user.id, user.name]
+ end
+ User.import(columns, updated_values, on_duplicate_key_update: [:name])
+ assert User.count == updated_values.length
+ User.all.each_with_index do |user, i|
+ assert_equal user.name, users[i].name + ' Rothschild'
+ assert_equal 1, user.lock_version
+ end
+ end
+
+ it 'lock_version upsert after on_duplcate_key_update by hash' do
+ users = [
+ User.new(name: 'Salomon'),
+ User.new(name: 'Nathan')
+ ]
+ User.import(users)
+ assert User.count == users.length
+ User.all.each do |user|
+ assert_equal 0, user.lock_version
+ end
+ updated_values = User.all.map do |user|
+ user.name += ' Rothschild'
+ { id: user.id, name: user.name }
+ end
+ User.import(updated_values, on_duplicate_key_update: [:name])
+ assert User.count == updated_values.length
+ User.all.each_with_index do |user, i|
+ assert_equal user.name, users[i].name + ' Rothschild'
+ assert_equal 1, user.lock_version
+ end
+ end
+
+ it 'upsert optimistic lock columns other than lock_version by model' do
+ accounts = [
+ Account.new(name: 'Salomon'),
+ Account.new(name: 'Nathan')
+ ]
+ Account.import(accounts)
+ assert Account.count == accounts.length
+ Account.all.each do |user|
+ assert_equal 0, user.lock
+ end
+ updated_accounts = Account.all.map do |user|
+ user.name += ' Rothschild'
+ user
+ end
+ Account.import(updated_accounts, on_duplicate_key_update: [:id, :name])
+ assert Account.count == updated_accounts.length
+ Account.all.each_with_index do |user, i|
+ assert_equal user.name, accounts[i].name + ' Rothschild'
+ assert_equal 1, user.lock
+ end
+ end
+
+ it 'upsert optimistic lock columns other than lock_version by array' do
+ accounts = [
+ Account.new(name: 'Salomon'),
+ Account.new(name: 'Nathan')
+ ]
+ Account.import(accounts)
+ assert Account.count == accounts.length
+ Account.all.each do |user|
+ assert_equal 0, user.lock
+ end
+
+ columns = [:id, :name]
+ updated_values = Account.all.map do |user|
+ user.name += ' Rothschild'
+ [user.id, user.name]
+ end
+ Account.import(columns, updated_values, on_duplicate_key_update: [:name])
+ assert Account.count == updated_values.length
+ Account.all.each_with_index do |user, i|
+ assert_equal user.name, accounts[i].name + ' Rothschild'
+ assert_equal 1, user.lock
+ end
+ end
+
+ it 'upsert optimistic lock columns other than lock_version by hash' do
+ accounts = [
+ Account.new(name: 'Salomon'),
+ Account.new(name: 'Nathan')
+ ]
+ Account.import(accounts)
+ assert Account.count == accounts.length
+ Account.all.each do |user|
+ assert_equal 0, user.lock
+ end
+ updated_values = Account.all.map do |user|
+ user.name += ' Rothschild'
+ { id: user.id, name: user.name }
+ end
+ Account.import(updated_values, on_duplicate_key_update: [:name])
+ assert Account.count == updated_values.length
+ Account.all.each_with_index do |user, i|
+ assert_equal user.name, accounts[i].name + ' Rothschild'
+ assert_equal 1, user.lock
+ end
+ end
+
+ it 'update the lock_version of models separated by namespaces by model' do
+ makers = [
+ Bike::Maker.new(name: 'Yamaha'),
+ Bike::Maker.new(name: 'Honda')
+ ]
+ Bike::Maker.import(makers)
+ assert Bike::Maker.count == makers.length
+ Bike::Maker.all.each do |maker|
+ assert_equal 0, maker.lock_version
+ end
+ updated_makers = Bike::Maker.all.map do |maker|
+ maker.name += ' bikes'
+ maker
+ end
+ Bike::Maker.import(updated_makers, on_duplicate_key_update: [:name])
+ assert Bike::Maker.count == updated_makers.length
+ Bike::Maker.all.each_with_index do |maker, i|
+ assert_equal maker.name, makers[i].name + ' bikes'
+ assert_equal 1, maker.lock_version
+ end
+ end
+ it 'update the lock_version of models separated by namespaces by array' do
+ makers = [
+ Bike::Maker.new(name: 'Yamaha'),
+ Bike::Maker.new(name: 'Honda')
+ ]
+ Bike::Maker.import(makers)
+ assert Bike::Maker.count == makers.length
+ Bike::Maker.all.each do |maker|
+ assert_equal 0, maker.lock_version
+ end
+
+ columns = [:id, :name]
+ updated_values = Bike::Maker.all.map do |maker|
+ maker.name += ' bikes'
+ [maker.id, maker.name]
+ end
+ Bike::Maker.import(columns, updated_values, on_duplicate_key_update: [:name])
+ assert Bike::Maker.count == updated_values.length
+ Bike::Maker.all.each_with_index do |maker, i|
+ assert_equal maker.name, makers[i].name + ' bikes'
+ assert_equal 1, maker.lock_version
+ end
+ end
+
+ it 'update the lock_version of models separated by namespaces by hash' do
+ makers = [
+ Bike::Maker.new(name: 'Yamaha'),
+ Bike::Maker.new(name: 'Honda')
+ ]
+ Bike::Maker.import(makers)
+ assert Bike::Maker.count == makers.length
+ Bike::Maker.all.each do |maker|
+ assert_equal 0, maker.lock_version
+ end
+ updated_values = Bike::Maker.all.map do |maker|
+ maker.name += ' bikes'
+ { id: maker.id, name: maker.name }
+ end
+ Bike::Maker.import(updated_values, on_duplicate_key_update: [:name])
+ assert Bike::Maker.count == updated_values.length
+ Bike::Maker.all.each_with_index do |maker, i|
+ assert_equal maker.name, makers[i].name + ' bikes'
+ assert_equal 1, maker.lock_version
+ end
+ end
+ end
+ end
+
  context "with :on_duplicate_key_update" do
+ describe 'using :all' do
+ let(:columns) { %w( id title author_name author_email_address parent_id ) }
+ let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] }
+
+ macro(:perform_import) do |*opts|
+ Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: :all, validate: false)
+ end
+
+ setup do
+ values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]]
+ Topic.import columns + ['replies_count'], values, validate: false
+ end
+
+ it 'updates all specified columns' do
+ perform_import
+ updated_topic = Topic.find(99)
+ assert_equal 'Book - 2nd Edition', updated_topic.title
+ assert_equal 'Jane Doe', updated_topic.author_name
+ assert_equal 'janedoe@example.com', updated_topic.author_email_address
+ assert_equal 57, updated_topic.parent_id
+ assert_equal 3, updated_topic.replies_count
+ end
+ end
+
  describe "argument safety" do
- it "should not modify the passed in :on_duplicate_key_update columns array" do
+ it "should not modify the passed in :on_duplicate_key_update array" do
  assert_nothing_raised do
  columns = %w(title author_name).freeze
  Topic.import columns, [%w(foo, bar)], on_duplicate_key_update: columns
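The lock_version tests above cover the new behaviour where an upsert also increments ActiveRecord's optimistic-locking column. A condensed sketch, assuming an adapter that writes generated ids back to the models (`User` with a `lock_version` column comes from the test schema):

    users = [User.new(name: 'Salomon'), User.new(name: 'Nathan')]
    User.import(users)                                     # lock_version starts at 0

    users.each { |u| u.name += ' Rothschild' }
    User.import(users, on_duplicate_key_update: [:name])   # updates the existing rows

    User.first.lock_version  # => 1 – the lock column is bumped by the upsert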
@@ -15,6 +246,26 @@ def should_support_basic_on_duplicate_key_update
  end
  end

+ context "with timestamps enabled" do
+ let(:time) { Chronic.parse("5 minutes from now") }
+
+ it 'should not overwrite changed updated_at with current timestamp' do
+ topic = Topic.create(author_name: "Jane Doe", title: "Book")
+ timestamp = Time.now.utc
+ topic.updated_at = timestamp
+ Topic.import [topic], on_duplicate_key_update: :all, validate: false
+ assert_equal timestamp.to_s, Topic.last.updated_at.to_s
+ end
+
+ it 'should update updated_at with current timestamp' do
+ topic = Topic.create(author_name: "Jane Doe", title: "Book")
+ Timecop.freeze(time) do
+ Topic.import [topic], on_duplicate_key_update: [:updated_at], validate: false
+ assert_in_delta time.to_i, topic.reload.updated_at.to_i, 1.second
+ end
+ end
+ end
+
  context "with validation checks turned off" do
  asssertion_group(:should_support_on_duplicate_key_update) do
  should_not_update_fields_not_mentioned
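The timestamp tests pin down two upsert behaviours with record timestamps enabled: an `updated_at` that was explicitly assigned is kept, while listing `:updated_at` among the update columns refreshes it. Roughly, assuming ActiveSupport's time helpers are available:

    topic = Topic.create(author_name: "Jane Doe", title: "Book")
    topic.updated_at = 1.hour.ago.utc
    Topic.import [topic], on_duplicate_key_update: :all, validate: false
    # the explicitly assigned updated_at is preserved

    other = Topic.create(author_name: "John Doe", title: "Sequel")
    Topic.import [other], on_duplicate_key_update: [:updated_at], validate: false
    # other.reload.updated_at is refreshed to the time of the import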
@@ -11,7 +11,7 @@ def should_support_recursive_import
  let(:num_chapters) { 18 }
  let(:num_endnotes) { 24 }

- let(:new_question_with_rule) { FactoryGirl.build :question, :with_rule }
+ let(:new_question_with_rule) { FactoryBot.build :question, :with_rule }

  it 'imports top level' do
  assert_difference "Topic.count", +num_topics do
@@ -90,6 +90,19 @@ def should_support_recursive_import
  end
  end

+ # Models are only valid if all associations are valid
+ it "only imports models with valid associations" do
+ assert_difference "Topic.count", 2 do
+ assert_difference "Book.count", 4 do
+ assert_difference "Chapter.count", 12 do
+ assert_difference "EndNote.count", 16 do
+ Topic.import new_topics_with_invalid_chapter, recursive: true
+ end
+ end
+ end
+ end
+ end
+
  it "skips validation of the associations if requested" do
  assert_difference "Chapter.count", +num_chapters do
  Topic.import new_topics_with_invalid_chapter, validate: false, recursive: true
@@ -102,6 +115,31 @@ def should_support_recursive_import
  end
  end

+ it "imports an imported belongs_to association id" do
+ first_new_topic = new_topics[0]
+ second_new_topic = new_topics[1]
+
+ books = first_new_topic.books.to_a
+ Topic.import new_topics, validate: false
+
+ assert_difference "Book.count", books.size do
+ Book.import books, validate: false
+ end
+
+ books.each do |book|
+ assert_equal book.topic_id, first_new_topic.id
+ end
+
+ books.each { |book| book.topic_id = second_new_topic.id }
+ assert_no_difference "Book.count", books.size do
+ Book.import books, validate: false, on_duplicate_key_update: [:topic_id]
+ end
+
+ books.each do |book|
+ assert_equal book.topic_id, second_new_topic.id
+ end
+ end
+
  unless ENV["SKIP_COMPOSITE_PK"]
  describe "with composite primary keys" do
  it "should import models and set id" do
@@ -119,22 +157,14 @@ def should_support_recursive_import
  end
  end

- # These models dont validate associated. So we expect that books and topics get inserted, but not chapters
- # Putting a transaction around everything wouldn't work, so if you want your chapters to prevent topics from
- # being created, you would need to have validates_associated in your models and insert with validation
  describe "all_or_none" do
- [Book, Topic, EndNote].each do |type|
+ [Book, Chapter, Topic, EndNote].each do |type|
  it "creates #{type}" do
- assert_difference "#{type}.count", send("num_#{type.to_s.downcase}s") do
+ assert_difference "#{type}.count", 0 do
  Topic.import new_topics_with_invalid_chapter, all_or_none: true, recursive: true
  end
  end
  end
- it "doesn't create chapters" do
- assert_difference "Chapter.count", 0 do
- Topic.import new_topics_with_invalid_chapter, all_or_none: true, recursive: true
- end
- end
  end

  # If adapter supports on_duplicate_key_update, it is only applied to top level models so that SQL with invalid