activerecord-import 0.23.0 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. checksums.yaml +5 -5
  2. data/.github/workflows/test.yaml +107 -0
  3. data/.gitignore +1 -0
  4. data/CHANGELOG.md +214 -4
  5. data/Gemfile +11 -9
  6. data/LICENSE +21 -56
  7. data/README.markdown +574 -22
  8. data/Rakefile +2 -1
  9. data/activerecord-import.gemspec +4 -4
  10. data/benchmarks/benchmark.rb +5 -1
  11. data/benchmarks/schema/{mysql_schema.rb → mysql2_schema.rb} +0 -0
  12. data/gemfiles/5.0.gemfile +1 -0
  13. data/gemfiles/5.1.gemfile +1 -0
  14. data/gemfiles/5.2.gemfile +2 -2
  15. data/gemfiles/6.0.gemfile +2 -0
  16. data/gemfiles/6.1.gemfile +2 -0
  17. data/gemfiles/7.0.gemfile +1 -0
  18. data/lib/activerecord-import/active_record/adapters/jdbcmysql_adapter.rb +4 -4
  19. data/lib/activerecord-import/adapters/abstract_adapter.rb +7 -1
  20. data/lib/activerecord-import/adapters/mysql_adapter.rb +8 -11
  21. data/lib/activerecord-import/adapters/postgresql_adapter.rb +14 -16
  22. data/lib/activerecord-import/adapters/sqlite3_adapter.rb +125 -8
  23. data/lib/activerecord-import/base.rb +9 -1
  24. data/lib/activerecord-import/import.rb +269 -123
  25. data/lib/activerecord-import/synchronize.rb +2 -2
  26. data/lib/activerecord-import/value_sets_parser.rb +2 -0
  27. data/lib/activerecord-import/version.rb +1 -1
  28. data/lib/activerecord-import.rb +1 -0
  29. data/test/adapters/makara_postgis.rb +1 -0
  30. data/test/{travis → github}/database.yml +3 -1
  31. data/test/import_test.rb +138 -8
  32. data/test/makara_postgis/import_test.rb +8 -0
  33. data/test/models/animal.rb +6 -0
  34. data/test/models/card.rb +3 -0
  35. data/test/models/customer.rb +6 -0
  36. data/test/models/deck.rb +6 -0
  37. data/test/models/order.rb +6 -0
  38. data/test/models/playing_card.rb +2 -0
  39. data/test/models/user.rb +3 -1
  40. data/test/models/user_token.rb +4 -0
  41. data/test/schema/generic_schema.rb +30 -0
  42. data/test/schema/mysql2_schema.rb +19 -0
  43. data/test/schema/postgresql_schema.rb +16 -0
  44. data/test/schema/sqlite3_schema.rb +13 -0
  45. data/test/support/factories.rb +8 -8
  46. data/test/support/generate.rb +6 -6
  47. data/test/support/mysql/import_examples.rb +12 -0
  48. data/test/support/postgresql/import_examples.rb +100 -2
  49. data/test/support/shared_examples/on_duplicate_key_update.rb +54 -0
  50. data/test/support/shared_examples/recursive_import.rb +74 -4
  51. data/test/support/sqlite3/import_examples.rb +189 -25
  52. data/test/test_helper.rb +28 -3
  53. metadata +37 -18
  54. data/.travis.yml +0 -62
  55. data/gemfiles/3.2.gemfile +0 -2
  56. data/gemfiles/4.0.gemfile +0 -2
  57. data/gemfiles/4.1.gemfile +0 -2
  58. data/test/schema/mysql_schema.rb +0 -16
@@ -37,6 +37,12 @@ def should_support_postgresql_import_functionality
37
37
  assert !topic.changed?
38
38
  end
39
39
 
40
+ if ENV['AR_VERSION'].to_f > 4.1
41
+ it "moves the dirty changes to previous_changes" do
42
+ assert topic.previous_changes.present?
43
+ end
44
+ end
45
+
40
46
  it "marks models as persisted" do
41
47
  assert !topic.new_record?
42
48
  assert topic.persisted?
@@ -110,6 +116,26 @@ def should_support_postgresql_import_functionality
110
116
  assert_equal [%w(King It)], result.results
111
117
  end
112
118
 
119
+ context "when given an empty array" do
120
+ let(:result) { Book.import([], returning: %w(title)) }
121
+
122
+ setup { result }
123
+
124
+ it "returns empty arrays for ids and results" do
125
+ assert_equal [], result.ids
126
+ assert_equal [], result.results
127
+ end
128
+ end
129
+
130
+ context "when a returning column is a serialized attribute" do
131
+ let(:vendor) { Vendor.new(hours: { monday: '8-5' }) }
132
+ let(:result) { Vendor.import([vendor], returning: %w(hours)) }
133
+
134
+ it "creates records" do
135
+ assert_difference("Vendor.count", +1) { result }
136
+ end
137
+ end
138
+
113
139
  context "when primary key and returning overlap" do
114
140
  let(:result) { Book.import(books, returning: %w(id title)) }
115
141
 
@@ -228,10 +254,34 @@ def should_support_postgresql_import_functionality
228
254
  assert_equal({}, Vendor.first.json_data)
229
255
  end
230
256
  end
257
+
258
+ %w(json jsonb).each do |json_type|
259
+ describe "with pure #{json_type} fields" do
260
+ let(:data) { { a: :b } }
261
+ let(:json_field_name) { "pure_#{json_type}_data" }
262
+ it "imports the values from saved records" do
263
+ vendor = Vendor.create!(name: 'Vendor 1', json_field_name => data)
264
+
265
+ Vendor.import [vendor], on_duplicate_key_update: [json_field_name]
266
+ assert_equal(data.as_json, vendor.reload[json_field_name])
267
+ end
268
+ end
269
+ end
270
+ end
271
+
272
+ describe "with enum field" do
273
+ let(:vendor_type) { "retailer" }
274
+ it "imports the correct values for enum fields" do
275
+ vendor = Vendor.new(name: 'Vendor 1', vendor_type: vendor_type)
276
+ assert_difference "Vendor.count", +1 do
277
+ Vendor.import [vendor]
278
+ end
279
+ assert_equal(vendor_type, Vendor.first.vendor_type)
280
+ end
231
281
  end
232
282
 
233
283
  describe "with binary field" do
234
- let(:binary_value) { "\xE0'c\xB2\xB0\xB3Bh\\\xC2M\xB1m\\I\xC4r".force_encoding('ASCII-8BIT') }
284
+ let(:binary_value) { "\xE0'c\xB2\xB0\xB3Bh\\\xC2M\xB1m\\I\xC4r".dup.force_encoding('ASCII-8BIT') }
235
285
  it "imports the correct values for binary fields" do
236
286
  alarms = [Alarm.new(device_id: 1, alarm_type: 1, status: 1, secret_key: binary_value)]
237
287
  assert_difference "Alarm.count", +1 do
@@ -240,6 +290,30 @@ def should_support_postgresql_import_functionality
240
290
  assert_equal(binary_value, Alarm.first.secret_key)
241
291
  end
242
292
  end
293
+
294
+ unless ENV["SKIP_COMPOSITE_PK"]
295
+ describe "with composite foreign keys" do
296
+ let(:account_id) { 555 }
297
+ let(:customer) { Customer.new(account_id: account_id, name: "foo") }
298
+ let(:order) { Order.new(account_id: account_id, amount: 100, customer: customer) }
299
+
300
+ it "imports and correctly maps foreign keys" do
301
+ assert_difference "Customer.count", +1 do
302
+ Customer.import [customer]
303
+ end
304
+
305
+ assert_difference "Order.count", +1 do
306
+ Order.import [order]
307
+ end
308
+
309
+ db_customer = Customer.last
310
+ db_order = Order.last
311
+
312
+ assert_equal db_customer.orders.last, db_order
313
+ assert_not_equal db_order.customer_id, nil
314
+ end
315
+ end
316
+ end
243
317
  end
244
318
 
245
319
  def should_support_postgresql_upsert_functionality
@@ -295,6 +369,30 @@ def should_support_postgresql_upsert_functionality
295
369
  end
296
370
 
297
371
  context "using a hash" do
372
+ context "with :columns :all" do
373
+ let(:columns) { %w( id title author_name author_email_address parent_id ) }
374
+ let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] }
375
+
376
+ macro(:perform_import) do |*opts|
377
+ Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: :all }, validate: false)
378
+ end
379
+
380
+ setup do
381
+ values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]]
382
+ Topic.import columns + ['replies_count'], values, validate: false
383
+ end
384
+
385
+ it "should update all specified columns" do
386
+ perform_import
387
+ updated_topic = Topic.find(99)
388
+ assert_equal 'Book - 2nd Edition', updated_topic.title
389
+ assert_equal 'Jane Doe', updated_topic.author_name
390
+ assert_equal 'janedoe@example.com', updated_topic.author_email_address
391
+ assert_equal 57, updated_topic.parent_id
392
+ assert_equal 3, updated_topic.replies_count
393
+ end
394
+ end
395
+
298
396
  context "with :columns a hash" do
299
397
  let(:columns) { %w( id title author_name author_email_address parent_id ) }
300
398
  let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] }
@@ -312,7 +410,7 @@ def should_support_postgresql_upsert_functionality
312
410
  it "should not modify the passed in :on_duplicate_key_update columns array" do
313
411
  assert_nothing_raised do
314
412
  columns = %w(title author_name).freeze
315
- Topic.import columns, [%w(foo, bar)], on_duplicate_key_update: { columns: columns }
413
+ Topic.import columns, [%w(foo, bar)], { on_duplicate_key_update: { columns: columns }.freeze }.freeze
316
414
  end
317
415
  end
318
416
 
@@ -73,6 +73,16 @@ def should_support_basic_on_duplicate_key_update
73
73
  assert_equal user.name, users[i].name + ' Rothschild'
74
74
  assert_equal 1, user.lock_version
75
75
  end
76
+ updated_values2 = User.all.map do |user|
77
+ user.name += ' jr.'
78
+ { id: user.id, name: user.name }
79
+ end
80
+ User.import(updated_values2, on_duplicate_key_update: [:name])
81
+ assert User.count == updated_values2.length
82
+ User.all.each_with_index do |user, i|
83
+ assert_equal user.name, users[i].name + ' Rothschild jr.'
84
+ assert_equal 2, user.lock_version
85
+ end
76
86
  end
77
87
 
78
88
  it 'upsert optimistic lock columns other than lock_version by model' do
@@ -213,6 +223,30 @@ def should_support_basic_on_duplicate_key_update
213
223
  end
214
224
 
215
225
  context "with :on_duplicate_key_update" do
226
+ describe 'using :all' do
227
+ let(:columns) { %w( id title author_name author_email_address parent_id ) }
228
+ let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] }
229
+
230
+ macro(:perform_import) do |*opts|
231
+ Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: :all, validate: false)
232
+ end
233
+
234
+ setup do
235
+ values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]]
236
+ Topic.import columns + ['replies_count'], values, validate: false
237
+ end
238
+
239
+ it 'updates all specified columns' do
240
+ perform_import
241
+ updated_topic = Topic.find(99)
242
+ assert_equal 'Book - 2nd Edition', updated_topic.title
243
+ assert_equal 'Jane Doe', updated_topic.author_name
244
+ assert_equal 'janedoe@example.com', updated_topic.author_email_address
245
+ assert_equal 57, updated_topic.parent_id
246
+ assert_equal 3, updated_topic.replies_count
247
+ end
248
+ end
249
+
216
250
  describe "argument safety" do
217
251
  it "should not modify the passed in :on_duplicate_key_update array" do
218
252
  assert_nothing_raised do
@@ -222,6 +256,26 @@ def should_support_basic_on_duplicate_key_update
222
256
  end
223
257
  end
224
258
 
259
+ context "with timestamps enabled" do
260
+ let(:time) { Chronic.parse("5 minutes from now") }
261
+
262
+ it 'should not overwrite changed updated_at with current timestamp' do
263
+ topic = Topic.create(author_name: "Jane Doe", title: "Book")
264
+ timestamp = Time.now.utc
265
+ topic.updated_at = timestamp
266
+ Topic.import [topic], on_duplicate_key_update: :all, validate: false
267
+ assert_equal timestamp.to_s, Topic.last.updated_at.to_s
268
+ end
269
+
270
+ it 'should update updated_at with current timestamp' do
271
+ topic = Topic.create(author_name: "Jane Doe", title: "Book")
272
+ Timecop.freeze(time) do
273
+ Topic.import [topic], on_duplicate_key_update: [:updated_at], validate: false
274
+ assert_in_delta time.to_i, topic.reload.updated_at.to_i, 1.second
275
+ end
276
+ end
277
+ end
278
+
225
279
  context "with validation checks turned off" do
226
280
  asssertion_group(:should_support_on_duplicate_key_update) do
227
281
  should_not_update_fields_not_mentioned
@@ -11,7 +11,7 @@ def should_support_recursive_import
11
11
  let(:num_chapters) { 18 }
12
12
  let(:num_endnotes) { 24 }
13
13
 
14
- let(:new_question_with_rule) { FactoryGirl.build :question, :with_rule }
14
+ let(:new_question_with_rule) { FactoryBot.build :question, :with_rule }
15
15
 
16
16
  it 'imports top level' do
17
17
  assert_difference "Topic.count", +num_topics do
@@ -116,7 +116,10 @@ def should_support_recursive_import
116
116
  end
117
117
 
118
118
  it "imports an imported belongs_to association id" do
119
- books = new_topics[0].books.to_a
119
+ first_new_topic = new_topics[0]
120
+ second_new_topic = new_topics[1]
121
+
122
+ books = first_new_topic.books.to_a
120
123
  Topic.import new_topics, validate: false
121
124
 
122
125
  assert_difference "Book.count", books.size do
@@ -124,7 +127,25 @@ def should_support_recursive_import
124
127
  end
125
128
 
126
129
  books.each do |book|
127
- assert_not_nil book.topic_id
130
+ assert_equal book.topic_id, first_new_topic.id
131
+ end
132
+
133
+ books.each { |book| book.topic_id = second_new_topic.id }
134
+ assert_no_difference "Book.count", books.size do
135
+ Book.import books, validate: false, on_duplicate_key_update: [:topic_id]
136
+ end
137
+
138
+ books.each do |book|
139
+ assert_equal book.topic_id, second_new_topic.id
140
+ end
141
+
142
+ books.each { |book| book.topic_id = nil }
143
+ assert_no_difference "Book.count", books.size do
144
+ Book.import books, validate: false, on_duplicate_key_update: [:topic_id]
145
+ end
146
+
147
+ books.each do |book|
148
+ assert_equal book.topic_id, nil
128
149
  end
129
150
  end
130
151
 
@@ -155,7 +176,7 @@ def should_support_recursive_import
155
176
  end
156
177
  end
157
178
 
158
- # If adapter supports on_duplicate_key_update, it is only applied to top level models so that SQL with invalid
179
+ # If adapter supports on_duplicate_key_update and specific columns are specified, it is only applied to top level models so that SQL with invalid
159
180
  # columns, keys, etc isn't generated for child associations when doing recursive import
160
181
  if ActiveRecord::Base.connection.supports_on_duplicate_key_update?
161
182
  describe "on_duplicate_key_update" do
@@ -169,6 +190,55 @@ def should_support_recursive_import
169
190
  end
170
191
  end
171
192
  end
193
+
194
+ context "when :all fields are updated" do
195
+ setup do
196
+ Topic.import new_topics, recursive: true
197
+ end
198
+
199
+ it "updates associated objects" do
200
+ new_author_name = 'Richard Bachman'
201
+ topic = new_topics.first
202
+ topic.books.each do |book|
203
+ book.author_name = new_author_name
204
+ end
205
+ assert_nothing_raised do
206
+ Topic.import new_topics, recursive: true, on_duplicate_key_update: :all
207
+ end
208
+ Topic.find(topic.id).books.each do |book|
209
+ assert_equal new_author_name, book.author_name
210
+ end
211
+ end
212
+ end
213
+ end
214
+ end
215
+
216
+ # If returning option is provided, it is only applied to top level models so that SQL with invalid
217
+ # columns, keys, etc isn't generated for child associations when doing recursive import
218
+ describe "returning" do
219
+ let(:new_topics) { Build(1, :topic_with_book) }
220
+
221
+ it "imports objects with associations" do
222
+ assert_difference "Topic.count", +1 do
223
+ Topic.import new_topics, recursive: true, returning: [:content], validate: false
224
+ new_topics.each do |topic|
225
+ assert_not_nil topic.id
226
+ end
227
+ end
228
+ end
229
+ end
230
+
231
+ # If no returning option is provided, it is ignored
232
+ describe "no returning" do
233
+ let(:new_topics) { Build(1, :topic_with_book) }
234
+
235
+ it "is ignored and imports objects with associations" do
236
+ assert_difference "Topic.count", +1 do
237
+ Topic.import new_topics, recursive: true, no_returning: true, validate: false
238
+ new_topics.each do |topic|
239
+ assert_not_nil topic.id
240
+ end
241
+ end
172
242
  end
173
243
  end
174
244
  end
@@ -1,23 +1,12 @@
1
1
  # encoding: UTF-8
2
2
  def should_support_sqlite3_import_functionality
3
- should_support_on_duplicate_key_ignore
3
+ if ActiveRecord::Base.connection.supports_on_duplicate_key_update?
4
+ should_support_sqlite_upsert_functionality
5
+ end
4
6
 
5
7
  describe "#supports_imports?" do
6
- context "and SQLite is 3.7.11 or higher" do
7
- it "supports import" do
8
- version = ActiveRecord::ConnectionAdapters::SQLite3Adapter::Version.new("3.7.11")
9
- assert ActiveRecord::Base.supports_import?(version)
10
-
11
- version = ActiveRecord::ConnectionAdapters::SQLite3Adapter::Version.new("3.7.12")
12
- assert ActiveRecord::Base.supports_import?(version)
13
- end
14
- end
15
-
16
- context "and SQLite less than 3.7.11" do
17
- it "doesn't support import" do
18
- version = ActiveRecord::ConnectionAdapters::SQLite3Adapter::Version.new("3.7.10")
19
- assert !ActiveRecord::Base.supports_import?(version)
20
- end
8
+ it "should support import" do
9
+ assert ActiveRecord::Base.supports_import?
21
10
  end
22
11
  end
23
12
 
@@ -49,18 +38,193 @@ def should_support_sqlite3_import_functionality
49
38
  assert_equal 2500, Topic.count, "Failed to insert all records. Make sure you have a supported version of SQLite3 (3.7.11 or higher) installed"
50
39
  end
51
40
  end
41
+ end
42
+ end
43
+
44
+ def should_support_sqlite_upsert_functionality
45
+ should_support_basic_on_duplicate_key_update
46
+ should_support_on_duplicate_key_ignore
47
+
48
+ describe "#import" do
49
+ extend ActiveSupport::TestCase::ImportAssertions
50
+
51
+ macro(:perform_import) { raise "supply your own #perform_import in a context below" }
52
+ macro(:updated_topic) { Topic.find(@topic.id) }
53
+
54
+ context "with :on_duplicate_key_ignore and validation checks turned off" do
55
+ let(:columns) { %w( id title author_name author_email_address parent_id ) }
56
+ let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] }
57
+ let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] }
58
+
59
+ setup do
60
+ Topic.import columns, values, validate: false
61
+ end
62
+
63
+ it "should not update any records" do
64
+ result = Topic.import columns, updated_values, on_duplicate_key_ignore: true, validate: false
65
+ assert_equal [], result.ids
66
+ end
67
+ end
68
+
69
+ context "with :on_duplicate_key_update and validation checks turned off" do
70
+ asssertion_group(:should_support_on_duplicate_key_update) do
71
+ should_not_update_fields_not_mentioned
72
+ should_update_foreign_keys
73
+ should_not_update_created_at_on_timestamp_columns
74
+ should_update_updated_at_on_timestamp_columns
75
+ end
76
+
77
+ context "using a hash" do
78
+ context "with :columns a hash" do
79
+ let(:columns) { %w( id title author_name author_email_address parent_id ) }
80
+ let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] }
81
+ let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] }
82
+
83
+ macro(:perform_import) do |*opts|
84
+ Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id, columns: update_columns }, validate: false)
85
+ end
86
+
87
+ setup do
88
+ Topic.import columns, values, validate: false
89
+ @topic = Topic.find 99
90
+ end
91
+
92
+ it "should not modify the passed in :on_duplicate_key_update columns array" do
93
+ assert_nothing_raised do
94
+ columns = %w(title author_name).freeze
95
+ Topic.import columns, [%w(foo, bar)], on_duplicate_key_update: { columns: columns }
96
+ end
97
+ end
98
+
99
+ context "using string hash map" do
100
+ let(:update_columns) { { "title" => "title", "author_email_address" => "author_email_address", "parent_id" => "parent_id" } }
101
+ should_support_on_duplicate_key_update
102
+ should_update_fields_mentioned
103
+ end
104
+
105
+ context "using string hash map, but specifying column mismatches" do
106
+ let(:update_columns) { { "title" => "author_email_address", "author_email_address" => "title", "parent_id" => "parent_id" } }
107
+ should_support_on_duplicate_key_update
108
+ should_update_fields_mentioned_with_hash_mappings
109
+ end
110
+
111
+ context "using symbol hash map" do
112
+ let(:update_columns) { { title: :title, author_email_address: :author_email_address, parent_id: :parent_id } }
113
+ should_support_on_duplicate_key_update
114
+ should_update_fields_mentioned
115
+ end
116
+
117
+ context "using symbol hash map, but specifying column mismatches" do
118
+ let(:update_columns) { { title: :author_email_address, author_email_address: :title, parent_id: :parent_id } }
119
+ should_support_on_duplicate_key_update
120
+ should_update_fields_mentioned_with_hash_mappings
121
+ end
122
+ end
123
+
124
+ context 'with :index_predicate' do
125
+ let(:columns) { %w( id device_id alarm_type status metadata ) }
126
+ let(:values) { [[99, 17, 1, 1, 'foo']] }
127
+ let(:updated_values) { [[99, 17, 1, 2, 'bar']] }
128
+
129
+ macro(:perform_import) do |*opts|
130
+ Alarm.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: [:device_id, :alarm_type], index_predicate: 'status <> 0', columns: [:status] }, validate: false)
131
+ end
132
+
133
+ macro(:updated_alarm) { Alarm.find(@alarm.id) }
134
+
135
+ setup do
136
+ Alarm.import columns, values, validate: false
137
+ @alarm = Alarm.find 99
138
+ end
139
+
140
+ context 'supports on duplicate key update for partial indexes' do
141
+ it 'should not update created_at timestamp columns' do
142
+ Timecop.freeze Chronic.parse("5 minutes from now") do
143
+ perform_import
144
+ assert_in_delta @alarm.created_at.to_i, updated_alarm.created_at.to_i, 1
145
+ end
146
+ end
147
+
148
+ it 'should update updated_at timestamp columns' do
149
+ time = Chronic.parse("5 minutes from now")
150
+ Timecop.freeze time do
151
+ perform_import
152
+ assert_in_delta time.to_i, updated_alarm.updated_at.to_i, 1
153
+ end
154
+ end
155
+
156
+ it 'should not update fields not mentioned' do
157
+ perform_import
158
+ assert_equal 'foo', updated_alarm.metadata
159
+ end
160
+
161
+ it 'should update fields mentioned with hash mappings' do
162
+ perform_import
163
+ assert_equal 2, updated_alarm.status
164
+ end
165
+ end
166
+ end
167
+
168
+ context 'with :condition' do
169
+ let(:columns) { %w( id device_id alarm_type status metadata) }
170
+ let(:values) { [[99, 17, 1, 1, 'foo']] }
171
+ let(:updated_values) { [[99, 17, 1, 1, 'bar']] }
172
+
173
+ macro(:perform_import) do |*opts|
174
+ Alarm.import(
175
+ columns,
176
+ updated_values,
177
+ opts.extract_options!.merge(
178
+ on_duplicate_key_update: {
179
+ conflict_target: [:id],
180
+ condition: "alarms.metadata NOT LIKE '%foo%'",
181
+ columns: [:metadata]
182
+ },
183
+ validate: false
184
+ )
185
+ )
186
+ end
187
+
188
+ macro(:updated_alarm) { Alarm.find(@alarm.id) }
189
+
190
+ setup do
191
+ Alarm.import columns, values, validate: false
192
+ @alarm = Alarm.find 99
193
+ end
194
+
195
+ it 'should not update fields not matched' do
196
+ perform_import
197
+ assert_equal 'foo', updated_alarm.metadata
198
+ end
199
+ end
200
+
201
+ context "with no :conflict_target" do
202
+ context "with no primary key" do
203
+ it "raises ArgumentError" do
204
+ error = assert_raises ArgumentError do
205
+ Rule.import Build(3, :rules), on_duplicate_key_update: [:condition_text], validate: false
206
+ end
207
+ assert_match(/Expected :conflict_target to be specified/, error.message)
208
+ end
209
+ end
210
+ end
211
+
212
+ context "with no :columns" do
213
+ let(:columns) { %w( id title author_name author_email_address ) }
214
+ let(:values) { [[100, "Book", "John Doe", "john@doe.com"]] }
215
+ let(:updated_values) { [[100, "Title Should Not Change", "Author Should Not Change", "john@nogo.com"]] }
52
216
 
53
- context "with :on_duplicate_key_update" do
54
- let(:topics) { Build(1, :topics) }
217
+ macro(:perform_import) do |*opts|
218
+ Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: { conflict_target: :id }, validate: false)
219
+ end
55
220
 
56
- it "should log a warning message" do
57
- log = StringIO.new
58
- logger = Logger.new(log)
59
- logger.level = Logger::WARN
60
- ActiveRecord::Base.connection.stubs(:logger).returns(logger)
221
+ setup do
222
+ Topic.import columns, values, validate: false
223
+ @topic = Topic.find 100
224
+ end
61
225
 
62
- Topic.import topics, on_duplicate_key_update: true
63
- assert_match(/Ignoring on_duplicate_key_update/, log.string)
226
+ should_update_updated_at_on_timestamp_columns
227
+ end
64
228
  end
65
229
  end
66
230
  end
data/test/test_helper.rb CHANGED
@@ -1,4 +1,5 @@
1
1
  require 'pathname'
2
+ require 'rake'
2
3
  test_dir = Pathname.new File.dirname(__FILE__)
3
4
  $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
4
5
  $LOAD_PATH.unshift(File.dirname(__FILE__))
@@ -33,6 +34,14 @@ rescue LoadError
33
34
  ENV["SKIP_COMPOSITE_PK"] = "true"
34
35
  end
35
36
 
37
+ # Support MySQL 5.7
38
+ if ActiveSupport::VERSION::STRING < "4.1"
39
+ require "active_record/connection_adapters/mysql2_adapter"
40
+ class ActiveRecord::ConnectionAdapters::Mysql2Adapter
41
+ NATIVE_DATABASE_TYPES[:primary_key] = "int(11) auto_increment PRIMARY KEY"
42
+ end
43
+ end
44
+
36
45
  require "ruby-debug" if RUBY_VERSION.to_f < 1.9
37
46
 
38
47
  adapter = ENV["ARE_DB"] || "sqlite3"
@@ -40,8 +49,24 @@ adapter = ENV["ARE_DB"] || "sqlite3"
40
49
  FileUtils.mkdir_p 'log'
41
50
  ActiveRecord::Base.logger = Logger.new("log/test.log")
42
51
  ActiveRecord::Base.logger.level = Logger::DEBUG
43
- ActiveRecord::Base.configurations["test"] = YAML.load_file(test_dir.join("database.yml"))[adapter]
44
- ActiveRecord::Base.default_timezone = :utc
52
+
53
+ if ENV['AR_VERSION'].to_f >= 6.0
54
+ yaml_config = if Gem::Version.new(Psych::VERSION) >= Gem::Version.new('3.2.1')
55
+ YAML.safe_load_file(test_dir.join("database.yml"), aliases: true)[adapter]
56
+ else
57
+ YAML.load_file(test_dir.join("database.yml"))[adapter]
58
+ end
59
+ config = ActiveRecord::DatabaseConfigurations::HashConfig.new("test", adapter, yaml_config)
60
+ ActiveRecord::Base.configurations.configurations << config
61
+ else
62
+ ActiveRecord::Base.configurations["test"] = YAML.load_file(test_dir.join("database.yml"))[adapter]
63
+ end
64
+
65
+ if ActiveRecord.respond_to?(:default_timezone)
66
+ ActiveRecord.default_timezone = :utc
67
+ else
68
+ ActiveRecord::Base.default_timezone = :utc
69
+ end
45
70
 
46
71
  require "activerecord-import"
47
72
  ActiveRecord::Base.establish_connection :test
@@ -50,7 +75,7 @@ ActiveSupport::Notifications.subscribe(/active_record.sql/) do |_, _, _, _, hsh|
50
75
  ActiveRecord::Base.logger.info hsh[:sql]
51
76
  end
52
77
 
53
- require "factory_girl"
78
+ require "factory_bot"
54
79
  Dir[File.dirname(__FILE__) + "/support/**/*.rb"].each { |file| require file }
55
80
 
56
81
  # Load base/generic schema