activerecord-import 1.0.3
- checksums.yaml +7 -0
- data/.gitignore +32 -0
- data/.rubocop.yml +49 -0
- data/.rubocop_todo.yml +36 -0
- data/.travis.yml +74 -0
- data/Brewfile +3 -0
- data/CHANGELOG.md +430 -0
- data/Gemfile +59 -0
- data/LICENSE +56 -0
- data/README.markdown +619 -0
- data/Rakefile +68 -0
- data/activerecord-import.gemspec +23 -0
- data/benchmarks/README +32 -0
- data/benchmarks/benchmark.rb +68 -0
- data/benchmarks/lib/base.rb +138 -0
- data/benchmarks/lib/cli_parser.rb +107 -0
- data/benchmarks/lib/float.rb +15 -0
- data/benchmarks/lib/mysql2_benchmark.rb +19 -0
- data/benchmarks/lib/output_to_csv.rb +19 -0
- data/benchmarks/lib/output_to_html.rb +64 -0
- data/benchmarks/models/test_innodb.rb +3 -0
- data/benchmarks/models/test_memory.rb +3 -0
- data/benchmarks/models/test_myisam.rb +3 -0
- data/benchmarks/schema/mysql_schema.rb +16 -0
- data/gemfiles/3.2.gemfile +2 -0
- data/gemfiles/4.0.gemfile +2 -0
- data/gemfiles/4.1.gemfile +2 -0
- data/gemfiles/4.2.gemfile +2 -0
- data/gemfiles/5.0.gemfile +2 -0
- data/gemfiles/5.1.gemfile +2 -0
- data/gemfiles/5.2.gemfile +2 -0
- data/gemfiles/6.0.gemfile +1 -0
- data/gemfiles/6.1.gemfile +1 -0
- data/lib/activerecord-import.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/abstract_adapter.rb +9 -0
- data/lib/activerecord-import/active_record/adapters/jdbcmysql_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/jdbcpostgresql_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/jdbcsqlite3_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/mysql2_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/postgresql_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/seamless_database_pool_adapter.rb +7 -0
- data/lib/activerecord-import/active_record/adapters/sqlite3_adapter.rb +6 -0
- data/lib/activerecord-import/adapters/abstract_adapter.rb +66 -0
- data/lib/activerecord-import/adapters/em_mysql2_adapter.rb +5 -0
- data/lib/activerecord-import/adapters/mysql2_adapter.rb +5 -0
- data/lib/activerecord-import/adapters/mysql_adapter.rb +129 -0
- data/lib/activerecord-import/adapters/postgresql_adapter.rb +217 -0
- data/lib/activerecord-import/adapters/sqlite3_adapter.rb +180 -0
- data/lib/activerecord-import/base.rb +43 -0
- data/lib/activerecord-import/import.rb +1059 -0
- data/lib/activerecord-import/mysql2.rb +7 -0
- data/lib/activerecord-import/postgresql.rb +7 -0
- data/lib/activerecord-import/sqlite3.rb +7 -0
- data/lib/activerecord-import/synchronize.rb +66 -0
- data/lib/activerecord-import/value_sets_parser.rb +77 -0
- data/lib/activerecord-import/version.rb +5 -0
- data/test/adapters/jdbcmysql.rb +1 -0
- data/test/adapters/jdbcpostgresql.rb +1 -0
- data/test/adapters/jdbcsqlite3.rb +1 -0
- data/test/adapters/makara_postgis.rb +1 -0
- data/test/adapters/mysql2.rb +1 -0
- data/test/adapters/mysql2_makara.rb +1 -0
- data/test/adapters/mysql2spatial.rb +1 -0
- data/test/adapters/postgis.rb +1 -0
- data/test/adapters/postgresql.rb +1 -0
- data/test/adapters/postgresql_makara.rb +1 -0
- data/test/adapters/seamless_database_pool.rb +1 -0
- data/test/adapters/spatialite.rb +1 -0
- data/test/adapters/sqlite3.rb +1 -0
- data/test/database.yml.sample +52 -0
- data/test/import_test.rb +903 -0
- data/test/jdbcmysql/import_test.rb +5 -0
- data/test/jdbcpostgresql/import_test.rb +4 -0
- data/test/jdbcsqlite3/import_test.rb +4 -0
- data/test/makara_postgis/import_test.rb +8 -0
- data/test/models/account.rb +3 -0
- data/test/models/alarm.rb +2 -0
- data/test/models/bike_maker.rb +7 -0
- data/test/models/book.rb +9 -0
- data/test/models/car.rb +3 -0
- data/test/models/chapter.rb +4 -0
- data/test/models/dictionary.rb +4 -0
- data/test/models/discount.rb +3 -0
- data/test/models/end_note.rb +4 -0
- data/test/models/group.rb +3 -0
- data/test/models/promotion.rb +3 -0
- data/test/models/question.rb +3 -0
- data/test/models/rule.rb +3 -0
- data/test/models/tag.rb +4 -0
- data/test/models/topic.rb +23 -0
- data/test/models/user.rb +3 -0
- data/test/models/user_token.rb +4 -0
- data/test/models/vendor.rb +7 -0
- data/test/models/widget.rb +24 -0
- data/test/mysql2/import_test.rb +5 -0
- data/test/mysql2_makara/import_test.rb +6 -0
- data/test/mysqlspatial2/import_test.rb +6 -0
- data/test/postgis/import_test.rb +8 -0
- data/test/postgresql/import_test.rb +4 -0
- data/test/schema/generic_schema.rb +194 -0
- data/test/schema/jdbcpostgresql_schema.rb +1 -0
- data/test/schema/mysql2_schema.rb +19 -0
- data/test/schema/postgis_schema.rb +1 -0
- data/test/schema/postgresql_schema.rb +47 -0
- data/test/schema/sqlite3_schema.rb +13 -0
- data/test/schema/version.rb +10 -0
- data/test/sqlite3/import_test.rb +4 -0
- data/test/support/active_support/test_case_extensions.rb +75 -0
- data/test/support/assertions.rb +73 -0
- data/test/support/factories.rb +64 -0
- data/test/support/generate.rb +29 -0
- data/test/support/mysql/import_examples.rb +98 -0
- data/test/support/postgresql/import_examples.rb +563 -0
- data/test/support/shared_examples/on_duplicate_key_ignore.rb +43 -0
- data/test/support/shared_examples/on_duplicate_key_update.rb +368 -0
- data/test/support/shared_examples/recursive_import.rb +216 -0
- data/test/support/sqlite3/import_examples.rb +231 -0
- data/test/synchronize_test.rb +41 -0
- data/test/test_helper.rb +75 -0
- data/test/travis/database.yml +66 -0
- data/test/value_sets_bytes_parser_test.rb +104 -0
- data/test/value_sets_records_parser_test.rb +32 -0
- metadata +259 -0
data/test/support/shared_examples/on_duplicate_key_ignore.rb
@@ -0,0 +1,43 @@
+def should_support_on_duplicate_key_ignore
+  describe "#import" do
+    extend ActiveSupport::TestCase::ImportAssertions
+    let(:topic) { Topic.create!(title: "Book", author_name: "John Doe") }
+    let(:topics) { [topic] }
+
+    context "with :on_duplicate_key_ignore" do
+      it "should skip duplicates and continue import" do
+        topics << Topic.new(title: "Book 2", author_name: "Jane Doe")
+        assert_difference "Topic.count", +1 do
+          result = Topic.import topics, on_duplicate_key_ignore: true, validate: false
+          assert_not_equal topics.first.id, result.ids.first
+          assert_nil topics.last.id
+        end
+      end
+
+      unless ENV["SKIP_COMPOSITE_PK"]
+        context "with composite primary keys" do
+          it "should import array of values successfully" do
+            columns = [:tag_id, :publisher_id, :tag]
+            values = [[1, 1, 'Mystery'], [1, 1, 'Science']]
+
+            assert_difference "Tag.count", +1 do
+              Tag.import columns, values, on_duplicate_key_ignore: true, validate: false
+            end
+            assert_equal 'Mystery', Tag.first.tag
+          end
+        end
+      end
+    end
+
+    context "with :ignore" do
+      it "should skip duplicates and continue import" do
+        topics << Topic.new(title: "Book 2", author_name: "Jane Doe")
+        assert_difference "Topic.count", +1 do
+          result = Topic.import topics, ignore: true, validate: false
+          assert_not_equal topics.first.id, result.ids.first
+          assert_nil topics.last.id
+        end
+      end
+    end
+  end
+end
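For orientation, the shared examples above exercise the gem's `:on_duplicate_key_ignore` option (and its `:ignore` shorthand). A minimal usage sketch, assuming the `Topic` model from the bundled test suite with a unique primary key:

```ruby
# Sketch only, reusing the Topic model from the gem's test suite.
existing = Topic.create!(title: "Book", author_name: "John Doe")
fresh    = Topic.new(title: "Book 2", author_name: "Jane Doe")

# Rows that would violate a unique constraint are silently skipped,
# so only `fresh` is inserted here.
Topic.import [existing, fresh], on_duplicate_key_ignore: true, validate: false

# :ignore is accepted as a shorthand for the same behaviour.
Topic.import [existing, fresh], ignore: true, validate: false
```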
data/test/support/shared_examples/on_duplicate_key_update.rb
@@ -0,0 +1,368 @@
+def should_support_basic_on_duplicate_key_update
+  describe "#import" do
+    extend ActiveSupport::TestCase::ImportAssertions
+
+    macro(:perform_import) { raise "supply your own #perform_import in a context below" }
+    macro(:updated_topic) { Topic.find(@topic.id) }
+
+    context "with lock_version upsert" do
+      describe 'optimistic lock' do
+        it 'lock_version upsert after on_duplicate_key_update by model' do
+          users = [
+            User.new(name: 'Salomon'),
+            User.new(name: 'Nathan')
+          ]
+          User.import(users)
+          assert User.count == users.length
+          User.all.each do |user|
+            assert_equal 0, user.lock_version
+          end
+          updated_users = User.all.map do |user|
+            user.name += ' Rothschild'
+            user
+          end
+          User.import(updated_users, on_duplicate_key_update: [:name])
+          assert User.count == updated_users.length
+          User.all.each_with_index do |user, i|
+            assert_equal user.name, users[i].name + ' Rothschild'
+            assert_equal 1, user.lock_version
+          end
+        end
+
+        it 'lock_version upsert after on_duplicate_key_update by array' do
+          users = [
+            User.new(name: 'Salomon'),
+            User.new(name: 'Nathan')
+          ]
+          User.import(users)
+          assert User.count == users.length
+          User.all.each do |user|
+            assert_equal 0, user.lock_version
+          end
+
+          columns = [:id, :name]
+          updated_values = User.all.map do |user|
+            user.name += ' Rothschild'
+            [user.id, user.name]
+          end
+          User.import(columns, updated_values, on_duplicate_key_update: [:name])
+          assert User.count == updated_values.length
+          User.all.each_with_index do |user, i|
+            assert_equal user.name, users[i].name + ' Rothschild'
+            assert_equal 1, user.lock_version
+          end
+        end
+
+        it 'lock_version upsert after on_duplicate_key_update by hash' do
+          users = [
+            User.new(name: 'Salomon'),
+            User.new(name: 'Nathan')
+          ]
+          User.import(users)
+          assert User.count == users.length
+          User.all.each do |user|
+            assert_equal 0, user.lock_version
+          end
+          updated_values = User.all.map do |user|
+            user.name += ' Rothschild'
+            { id: user.id, name: user.name }
+          end
+          User.import(updated_values, on_duplicate_key_update: [:name])
+          assert User.count == updated_values.length
+          User.all.each_with_index do |user, i|
+            assert_equal user.name, users[i].name + ' Rothschild'
+            assert_equal 1, user.lock_version
+          end
+        end
+
+        it 'upsert optimistic lock columns other than lock_version by model' do
+          accounts = [
+            Account.new(name: 'Salomon'),
+            Account.new(name: 'Nathan')
+          ]
+          Account.import(accounts)
+          assert Account.count == accounts.length
+          Account.all.each do |user|
+            assert_equal 0, user.lock
+          end
+          updated_accounts = Account.all.map do |user|
+            user.name += ' Rothschild'
+            user
+          end
+          Account.import(updated_accounts, on_duplicate_key_update: [:id, :name])
+          assert Account.count == updated_accounts.length
+          Account.all.each_with_index do |user, i|
+            assert_equal user.name, accounts[i].name + ' Rothschild'
+            assert_equal 1, user.lock
+          end
+        end
+
+        it 'upsert optimistic lock columns other than lock_version by array' do
+          accounts = [
+            Account.new(name: 'Salomon'),
+            Account.new(name: 'Nathan')
+          ]
+          Account.import(accounts)
+          assert Account.count == accounts.length
+          Account.all.each do |user|
+            assert_equal 0, user.lock
+          end
+
+          columns = [:id, :name]
+          updated_values = Account.all.map do |user|
+            user.name += ' Rothschild'
+            [user.id, user.name]
+          end
+          Account.import(columns, updated_values, on_duplicate_key_update: [:name])
+          assert Account.count == updated_values.length
+          Account.all.each_with_index do |user, i|
+            assert_equal user.name, accounts[i].name + ' Rothschild'
+            assert_equal 1, user.lock
+          end
+        end
+
+        it 'upsert optimistic lock columns other than lock_version by hash' do
+          accounts = [
+            Account.new(name: 'Salomon'),
+            Account.new(name: 'Nathan')
+          ]
+          Account.import(accounts)
+          assert Account.count == accounts.length
+          Account.all.each do |user|
+            assert_equal 0, user.lock
+          end
+          updated_values = Account.all.map do |user|
+            user.name += ' Rothschild'
+            { id: user.id, name: user.name }
+          end
+          Account.import(updated_values, on_duplicate_key_update: [:name])
+          assert Account.count == updated_values.length
+          Account.all.each_with_index do |user, i|
+            assert_equal user.name, accounts[i].name + ' Rothschild'
+            assert_equal 1, user.lock
+          end
+        end
+
+        it 'update the lock_version of models separated by namespaces by model' do
+          makers = [
+            Bike::Maker.new(name: 'Yamaha'),
+            Bike::Maker.new(name: 'Honda')
+          ]
+          Bike::Maker.import(makers)
+          assert Bike::Maker.count == makers.length
+          Bike::Maker.all.each do |maker|
+            assert_equal 0, maker.lock_version
+          end
+          updated_makers = Bike::Maker.all.map do |maker|
+            maker.name += ' bikes'
+            maker
+          end
+          Bike::Maker.import(updated_makers, on_duplicate_key_update: [:name])
+          assert Bike::Maker.count == updated_makers.length
+          Bike::Maker.all.each_with_index do |maker, i|
+            assert_equal maker.name, makers[i].name + ' bikes'
+            assert_equal 1, maker.lock_version
+          end
+        end
+        it 'update the lock_version of models separated by namespaces by array' do
+          makers = [
+            Bike::Maker.new(name: 'Yamaha'),
+            Bike::Maker.new(name: 'Honda')
+          ]
+          Bike::Maker.import(makers)
+          assert Bike::Maker.count == makers.length
+          Bike::Maker.all.each do |maker|
+            assert_equal 0, maker.lock_version
+          end
+
+          columns = [:id, :name]
+          updated_values = Bike::Maker.all.map do |maker|
+            maker.name += ' bikes'
+            [maker.id, maker.name]
+          end
+          Bike::Maker.import(columns, updated_values, on_duplicate_key_update: [:name])
+          assert Bike::Maker.count == updated_values.length
+          Bike::Maker.all.each_with_index do |maker, i|
+            assert_equal maker.name, makers[i].name + ' bikes'
+            assert_equal 1, maker.lock_version
+          end
+        end
+
+        it 'update the lock_version of models separated by namespaces by hash' do
+          makers = [
+            Bike::Maker.new(name: 'Yamaha'),
+            Bike::Maker.new(name: 'Honda')
+          ]
+          Bike::Maker.import(makers)
+          assert Bike::Maker.count == makers.length
+          Bike::Maker.all.each do |maker|
+            assert_equal 0, maker.lock_version
+          end
+          updated_values = Bike::Maker.all.map do |maker|
+            maker.name += ' bikes'
+            { id: maker.id, name: maker.name }
+          end
+          Bike::Maker.import(updated_values, on_duplicate_key_update: [:name])
+          assert Bike::Maker.count == updated_values.length
+          Bike::Maker.all.each_with_index do |maker, i|
+            assert_equal maker.name, makers[i].name + ' bikes'
+            assert_equal 1, maker.lock_version
+          end
+        end
+      end
+    end
+
+    context "with :on_duplicate_key_update" do
+      describe 'using :all' do
+        let(:columns) { %w( id title author_name author_email_address parent_id ) }
+        let(:updated_values) { [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]] }
+
+        macro(:perform_import) do |*opts|
+          Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: :all, validate: false)
+        end
+
+        setup do
+          values = [[99, "Book", "John Doe", "john@doe.com", 17, 3]]
+          Topic.import columns + ['replies_count'], values, validate: false
+        end
+
+        it 'updates all specified columns' do
+          perform_import
+          updated_topic = Topic.find(99)
+          assert_equal 'Book - 2nd Edition', updated_topic.title
+          assert_equal 'Jane Doe', updated_topic.author_name
+          assert_equal 'janedoe@example.com', updated_topic.author_email_address
+          assert_equal 57, updated_topic.parent_id
+          assert_equal 3, updated_topic.replies_count
+        end
+      end
+
+      describe "argument safety" do
+        it "should not modify the passed in :on_duplicate_key_update array" do
+          assert_nothing_raised do
+            columns = %w(title author_name).freeze
+            Topic.import columns, [%w(foo, bar)], on_duplicate_key_update: columns
+          end
+        end
+      end
+
+      context "with timestamps enabled" do
+        let(:time) { Chronic.parse("5 minutes from now") }
+
+        it 'should not overwrite changed updated_at with current timestamp' do
+          topic = Topic.create(author_name: "Jane Doe", title: "Book")
+          timestamp = Time.now.utc
+          topic.updated_at = timestamp
+          Topic.import [topic], on_duplicate_key_update: :all, validate: false
+          assert_equal timestamp.to_s, Topic.last.updated_at.to_s
+        end
+
+        it 'should update updated_at with current timestamp' do
+          topic = Topic.create(author_name: "Jane Doe", title: "Book")
+          Timecop.freeze(time) do
+            Topic.import [topic], on_duplicate_key_update: [:updated_at], validate: false
+            assert_in_delta time.to_i, topic.reload.updated_at.to_i, 1.second
+          end
+        end
+      end
+
+      context "with validation checks turned off" do
+        asssertion_group(:should_support_on_duplicate_key_update) do
+          should_not_update_fields_not_mentioned
+          should_update_foreign_keys
+          should_not_update_created_at_on_timestamp_columns
+          should_update_updated_at_on_timestamp_columns
+        end
+
+        let(:columns) { %w( id title author_name author_email_address parent_id ) }
+        let(:values) { [[99, "Book", "John Doe", "john@doe.com", 17]] }
+        let(:updated_values) { [[99, "Book - 2nd Edition", "Author Should Not Change", "johndoe@example.com", 57]] }
+
+        macro(:perform_import) do |*opts|
+          Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: update_columns, validate: false)
+        end
+
+        setup do
+          Topic.import columns, values, validate: false
+          @topic = Topic.find 99
+        end
+
+        context "using an empty array" do
+          let(:update_columns) { [] }
+          should_not_update_fields_not_mentioned
+          should_update_updated_at_on_timestamp_columns
+        end
+
+        context "using string column names" do
+          let(:update_columns) { %w(title author_email_address parent_id) }
+          should_support_on_duplicate_key_update
+          should_update_fields_mentioned
+        end
+
+        context "using symbol column names" do
+          let(:update_columns) { [:title, :author_email_address, :parent_id] }
+          should_support_on_duplicate_key_update
+          should_update_fields_mentioned
+        end
+      end
+
+      context "with a table that has a non-standard primary key" do
+        let(:columns) { [:promotion_id, :code] }
+        let(:values) { [[1, 'DISCOUNT1']] }
+        let(:updated_values) { [[1, 'DISCOUNT2']] }
+        let(:update_columns) { [:code] }
+
+        macro(:perform_import) do |*opts|
+          Promotion.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: update_columns, validate: false)
+        end
+        macro(:updated_promotion) { Promotion.find(@promotion.promotion_id) }
+
+        setup do
+          Promotion.import columns, values, validate: false
+          @promotion = Promotion.find 1
+        end
+
+        it "should update specified columns" do
+          perform_import
+          assert_equal 'DISCOUNT2', updated_promotion.code
+        end
+      end
+
+      unless ENV["SKIP_COMPOSITE_PK"]
+        context "with composite primary keys" do
+          it "should import array of values successfully" do
+            columns = [:tag_id, :publisher_id, :tag]
+            Tag.import columns, [[1, 1, 'Mystery']], validate: false
+
+            assert_difference "Tag.count", +0 do
+              Tag.import columns, [[1, 1, 'Science']], on_duplicate_key_update: [:tag], validate: false
+            end
+            assert_equal 'Science', Tag.first.tag
+          end
+        end
+      end
+    end
+
+    context "with :on_duplicate_key_update turned off" do
+      let(:columns) { %w( id title author_name author_email_address parent_id ) }
+      let(:values) { [[100, "Book", "John Doe", "john@doe.com", 17]] }
+      let(:updated_values) { [[100, "Book - 2nd Edition", "This should raise an exception", "john@nogo.com", 57]] }
+
+      macro(:perform_import) do |*opts|
+        # `on_duplicate_key_update: false` is the tested feature
+        Topic.import columns, updated_values, opts.extract_options!.merge(on_duplicate_key_update: false, validate: false)
+      end
+
+      setup do
+        Topic.import columns, values, validate: false
+        @topic = Topic.find 100
+      end
+
+      it "should raise ActiveRecord::RecordNotUnique" do
+        assert_raise ActiveRecord::RecordNotUnique do
+          perform_import
+        end
+      end
+    end
+  end
+end
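These examples cover `:on_duplicate_key_update` in its accepted forms (`:all`, a list of columns, and `false` to let the duplicate-key error surface) plus its interaction with `lock_version`-style optimistic locking. A condensed, hedged sketch of the column-list form, reusing the `Topic` columns from the tests:

```ruby
# Sketch only, with the Topic columns used in the shared examples above.
columns = %w(id title author_name author_email_address parent_id)

# Seed a row with id 99.
Topic.import columns, [[99, "Book", "John Doe", "john@doe.com", 17]], validate: false

# On conflict, only :title and :parent_id are overwritten; the author columns
# keep their existing values.
updated = [[99, "Book - 2nd Edition", "Jane Doe", "janedoe@example.com", 57]]
Topic.import columns, updated, on_duplicate_key_update: [:title, :parent_id], validate: false

# on_duplicate_key_update: :all updates every imported column, while
# on_duplicate_key_update: false lets ActiveRecord::RecordNotUnique propagate,
# as the last context above asserts.
```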
data/test/support/shared_examples/recursive_import.rb
@@ -0,0 +1,216 @@
+def should_support_recursive_import
+  describe "importing objects with associations" do
+    let(:new_topics) { Build(num_topics, :topic_with_book) }
+    let(:new_topics_with_invalid_chapter) do
+      chapter = new_topics.first.books.first.chapters.first
+      chapter.title = nil
+      new_topics
+    end
+    let(:num_topics) { 3 }
+    let(:num_books) { 6 }
+    let(:num_chapters) { 18 }
+    let(:num_endnotes) { 24 }
+
+    let(:new_question_with_rule) { FactoryBot.build :question, :with_rule }
+
+    it 'imports top level' do
+      assert_difference "Topic.count", +num_topics do
+        Topic.import new_topics, recursive: true
+        new_topics.each do |topic|
+          assert_not_nil topic.id
+        end
+      end
+    end
+
+    it 'imports first level associations' do
+      assert_difference "Book.count", +num_books do
+        Topic.import new_topics, recursive: true
+        new_topics.each do |topic|
+          topic.books.each do |book|
+            assert_equal topic.id, book.topic_id
+          end
+        end
+      end
+    end
+
+    it 'imports polymorphic associations' do
+      discounts = Array.new(1) { |i| Discount.new(amount: i) }
+      books = Array.new(1) { |i| Book.new(author_name: "Author ##{i}", title: "Book ##{i}") }
+      books.each do |book|
+        book.discounts << discounts
+      end
+      Book.import books, recursive: true
+      books.each do |book|
+        book.discounts.each do |discount|
+          assert_not_nil discount.discountable_id
+          assert_equal 'Book', discount.discountable_type
+        end
+      end
+    end
+
+    it 'imports polymorphic associations from subclass' do
+      discounts = Array.new(1) { |i| Discount.new(amount: i) }
+      dictionaries = Array.new(1) { |i| Dictionary.new(author_name: "Author ##{i}", title: "Book ##{i}") }
+      dictionaries.each do |dictionary|
+        dictionary.discounts << discounts
+      end
+      Dictionary.import dictionaries, recursive: true
+      assert_equal 1, Dictionary.last.discounts.count
+      dictionaries.each do |dictionary|
+        dictionary.discounts.each do |discount|
+          assert_not_nil discount.discountable_id
+          assert_equal 'Book', discount.discountable_type
+        end
+      end
+    end
+
+    [{ recursive: false }, {}].each do |import_options|
+      it "skips recursion for #{import_options}" do
+        assert_difference "Book.count", 0 do
+          Topic.import new_topics, import_options
+        end
+      end
+    end
+
+    it 'imports deeper nested associations' do
+      assert_difference "Chapter.count", +num_chapters do
+        assert_difference "EndNote.count", +num_endnotes do
+          Topic.import new_topics, recursive: true
+          new_topics.each do |topic|
+            topic.books.each do |book|
+              book.chapters.each do |chapter|
+                assert_equal book.id, chapter.book_id
+              end
+              book.end_notes.each do |endnote|
+                assert_equal book.id, endnote.book_id
+              end
+            end
+          end
+        end
+      end
+    end
+
+    # Models are only valid if all associations are valid
+    it "only imports models with valid associations" do
+      assert_difference "Topic.count", 2 do
+        assert_difference "Book.count", 4 do
+          assert_difference "Chapter.count", 12 do
+            assert_difference "EndNote.count", 16 do
+              Topic.import new_topics_with_invalid_chapter, recursive: true
+            end
+          end
+        end
+      end
+    end
+
+    it "skips validation of the associations if requested" do
+      assert_difference "Chapter.count", +num_chapters do
+        Topic.import new_topics_with_invalid_chapter, validate: false, recursive: true
+      end
+    end
+
+    it 'imports has_one associations' do
+      assert_difference 'Rule.count' do
+        Question.import [new_question_with_rule], recursive: true
+      end
+    end
+
+    it "imports an imported belongs_to association id" do
+      first_new_topic = new_topics[0]
+      second_new_topic = new_topics[1]
+
+      books = first_new_topic.books.to_a
+      Topic.import new_topics, validate: false
+
+      assert_difference "Book.count", books.size do
+        Book.import books, validate: false
+      end
+
+      books.each do |book|
+        assert_equal book.topic_id, first_new_topic.id
+      end
+
+      books.each { |book| book.topic_id = second_new_topic.id }
+      assert_no_difference "Book.count", books.size do
+        Book.import books, validate: false, on_duplicate_key_update: [:topic_id]
+      end
+
+      books.each do |book|
+        assert_equal book.topic_id, second_new_topic.id
+      end
+    end
+
+    unless ENV["SKIP_COMPOSITE_PK"]
+      describe "with composite primary keys" do
+        it "should import models and set id" do
+          tags = []
+          tags << Tag.new(tag_id: 1, publisher_id: 1, tag: 'Mystery')
+          tags << Tag.new(tag_id: 2, publisher_id: 1, tag: 'Science')
+
+          assert_difference "Tag.count", +2 do
+            Tag.import tags
+          end
+
+          assert_equal 1, tags[0].tag_id
+          assert_equal 2, tags[1].tag_id
+        end
+      end
+    end
+
+    describe "all_or_none" do
+      [Book, Chapter, Topic, EndNote].each do |type|
+        it "creates #{type}" do
+          assert_difference "#{type}.count", 0 do
+            Topic.import new_topics_with_invalid_chapter, all_or_none: true, recursive: true
+          end
+        end
+      end
+    end
+
+    # If adapter supports on_duplicate_key_update, it is only applied to top level models so that SQL with invalid
+    # columns, keys, etc isn't generated for child associations when doing recursive import
+    if ActiveRecord::Base.connection.supports_on_duplicate_key_update?
+      describe "on_duplicate_key_update" do
+        let(:new_topics) { Build(1, :topic_with_book) }
+
+        it "imports objects with associations" do
+          assert_difference "Topic.count", +1 do
+            Topic.import new_topics, recursive: true, on_duplicate_key_update: [:updated_at], validate: false
+            new_topics.each do |topic|
+              assert_not_nil topic.id
+            end
+          end
+        end
+      end
+    end
+
+    # If returning option is provided, it is only applied to top level models so that SQL with invalid
+    # columns, keys, etc isn't generated for child associations when doing recursive import
+    describe "returning" do
+      let(:new_topics) { Build(1, :topic_with_book) }
+
+      it "imports objects with associations" do
+        assert_difference "Topic.count", +1 do
+          Topic.import new_topics, recursive: true, returning: [:content], validate: false
+          new_topics.each do |topic|
+            assert_not_nil topic.id
+          end
+        end
+      end
+    end
+
+    # If no returning option is provided, it is ignored
+    describe "no returning" do
+      let(:new_topics) { Build(1, :topic_with_book) }
+
+      it "is ignored and imports objects with associations" do
+        assert_difference "Topic.count", +1 do
+          Topic.import new_topics, recursive: true, no_returning: true, validate: false
+          new_topics.each do |topic|
+            assert_not_nil topic.id
+          end
+        end
+      end
+    end
+  end
+end
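The recursive examples build their fixtures through factories (`Build(num_topics, :topic_with_book)`), so here is a plainer sketch of `recursive: true`, assuming the `Topic`/`Book` models and `has_many :books` association from the test schema; the book titles are illustrative values only:

```ruby
# Sketch only, assuming Topic has_many :books as in the gem's test schema.
topic = Topic.new(title: "Book", author_name: "John Doe")
topic.books.build(title: "Tome 1", author_name: "John Doe")
topic.books.build(title: "Tome 2", author_name: "John Doe")

# With recursive: true, the unsaved books are imported after their topics and
# their topic_id foreign keys are filled in from the newly assigned parent ids.
Topic.import [topic], recursive: true

# Without the option (or with recursive: false) only the topics themselves are inserted.
Topic.import [topic]
```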