bulk_insert 1.4.0 → 1.8.1
This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/README.md +42 -1
- data/Rakefile +0 -5
- data/lib/bulk_insert.rb +2 -2
- data/lib/bulk_insert/statement_adapters.rb +22 -0
- data/lib/bulk_insert/statement_adapters/base_adapter.rb +21 -0
- data/lib/bulk_insert/statement_adapters/generic_adapter.rb +19 -0
- data/lib/bulk_insert/statement_adapters/mysql_adapter.rb +24 -0
- data/lib/bulk_insert/statement_adapters/postgresql_adapter.rb +28 -0
- data/lib/bulk_insert/statement_adapters/sqlite_adapter.rb +19 -0
- data/lib/bulk_insert/version.rb +2 -2
- data/lib/bulk_insert/worker.rb +63 -24
- data/test/bulk_insert/worker_test.rb +306 -1
- data/test/bulk_insert_test.rb +15 -1
- data/test/dummy/config/application.rb +1 -3
- metadata +43 -44
- data/test/dummy/db/development.sqlite3 +0 -0
- data/test/dummy/db/test.sqlite3 +0 -0
- data/test/dummy/log/development.log +0 -17
- data/test/dummy/log/test.log +0 -3396
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
-SHA256:
-  metadata.gz: [old checksum not shown in this extract]
-  data.tar.gz: [old checksum not shown in this extract]
+SHA256:
+  metadata.gz: 5e78f757829191018747a87be6b1c6aec0a8a140b248fab6e698b217d0125575
+  data.tar.gz: b3e5cce589f88d49820f06681c72c833f727c2743650e7da0a84cb9fffe40eed
 SHA512:
-  metadata.gz: [old checksum not shown in this extract]
-  data.tar.gz: [old checksum not shown in this extract]
+  metadata.gz: fa009cab807affa711ec0ac7c4618137401fa56e91651103947c444f835970e61c9a2855316bc0b46c3a669eb84b1f08ae0b211cfdac8f870f910dc203455830
+  data.tar.gz: d2f0a68185ebc1b788e5bb90509ab37240599b288d65ef26bc23b900545bc4766eb159757aa2b7ac441d99af5578da339ca1dfe9876ae11904f298443bdaa4c6
data/README.md
CHANGED
@@ -104,7 +104,6 @@ empty the batch so that you can add more rows to it if you want. Note
 that all records saved together will have the same created_at/updated_at
 timestamp (unless one was explicitly set).
 
-
 ### Batch Set Size
 
 By default, the size of the insert is limited to 500 rows at a time.
@@ -149,6 +148,48 @@ Book.bulk_insert(*destination_columns, ignore: true) do |worker|
 end
 ```
 
+### Update Duplicates (MySQL, PostgreSQL)
+
+If you don't want to ignore duplicate rows but instead want to update them
+then you can use the _update_duplicates_ option. Set this option to true
+(MySQL) or list unique column names (PostgreSQL) and when a duplicate row
+is found the row will be updated with your new values.
+Default value for this option is false.
+
+```ruby
+destination_columns = [:title, :author]
+
+# Update duplicate rows (MySQL)
+Book.bulk_insert(*destination_columns, update_duplicates: true) do |worker|
+  worker.add(...)
+  worker.add(...)
+  # ...
+end
+
+# Update duplicate rows (PostgreSQL)
+Book.bulk_insert(*destination_columns, update_duplicates: %w[title]) do |worker|
+  worker.add(...)
+  # ...
+end
+```
+
+### Return Primary Keys (PostgreSQL, PostGIS)
+
+If you want the worker to store primary keys of inserted records, then you can
+use the _return_primary_keys_ option. The worker will store a `result_sets`
+array of `ActiveRecord::Result` objects. Each `ActiveRecord::Result` object
+will contain the primary keys of a batch of inserted records.
+
+```ruby
+worker = Book.bulk_insert(*destination_columns, return_primary_keys: true) do
+|worker|
+  worker.add(...)
+  worker.add(...)
+  # ...
+end
+
+worker.result_sets
+```
 
 ## License
 
data/Rakefile
CHANGED
data/lib/bulk_insert.rb
CHANGED
@@ -4,9 +4,9 @@ module BulkInsert
   extend ActiveSupport::Concern
 
   module ClassMethods
-    def bulk_insert(*columns, values: nil, set_size:500, ignore: false)
+    def bulk_insert(*columns, values: nil, set_size:500, ignore: false, update_duplicates: false, return_primary_keys: false)
       columns = default_bulk_columns if columns.empty?
-      worker = BulkInsert::Worker.new(connection, table_name, columns, set_size, ignore)
+      worker = BulkInsert::Worker.new(connection, table_name, primary_key, columns, set_size, ignore, update_duplicates, return_primary_keys)
 
       if values.present?
         transaction do
data/lib/bulk_insert/statement_adapters.rb
ADDED
@@ -0,0 +1,22 @@
+require_relative 'statement_adapters/generic_adapter'
+require_relative 'statement_adapters/mysql_adapter'
+require_relative 'statement_adapters/postgresql_adapter'
+require_relative 'statement_adapters/sqlite_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    def adapter_for(connection)
+      case connection.adapter_name
+      when /^mysql/i
+        MySQLAdapter.new
+      when /\APost(?:greSQL|GIS)/i
+        PostgreSQLAdapter.new
+      when /\ASQLite/i
+        SQLiteAdapter.new
+      else
+        GenericAdapter.new
+      end
+    end
+    module_function :adapter_for
+  end
+end
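For orientation, a minimal sketch of how the adapter lookup above behaves. The `FakeConnection` struct is a stand-in used only for illustration (the real argument is an ActiveRecord connection), and it assumes the gem's lib directory is on the load path.

```ruby
require 'bulk_insert/statement_adapters'

# Stand-in exposing only #adapter_name, which is all adapter_for reads.
FakeConnection = Struct.new(:adapter_name)

BulkInsert::StatementAdapters.adapter_for(FakeConnection.new('Mysql2'))
# => a MySQLAdapter (matches /^mysql/i)
BulkInsert::StatementAdapters.adapter_for(FakeConnection.new('PostGIS'))
# => a PostgreSQLAdapter (matches /\APost(?:greSQL|GIS)/i)
BulkInsert::StatementAdapters.adapter_for(FakeConnection.new('SQLite'))
# => a SQLiteAdapter
BulkInsert::StatementAdapters.adapter_for(FakeConnection.new('OracleEnhanced'))
# => a GenericAdapter (fallback for unrecognized adapters)
```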
data/lib/bulk_insert/statement_adapters/base_adapter.rb
ADDED
@@ -0,0 +1,21 @@
+module BulkInsert
+  module StatementAdapters
+    class BaseAdapter
+      def initialize
+        raise "You cannot initialize base adapter" if self.class == BaseAdapter
+      end
+
+      def insert_ignore_statement
+        raise "Not implemented"
+      end
+
+      def on_conflict_statement(_columns, _ignore, _update_duplicates)
+        raise "Not implemented"
+      end
+
+      def primary_key_return_statement(_primary_key)
+        raise "Not implemented"
+      end
+    end
+  end
+end
data/lib/bulk_insert/statement_adapters/generic_adapter.rb
ADDED
@@ -0,0 +1,19 @@
+require_relative 'base_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    class GenericAdapter < BaseAdapter
+      def insert_ignore_statement
+        ''
+      end
+
+      def on_conflict_statement(_columns, _ignore, _update_duplicates)
+        ''
+      end
+
+      def primary_key_return_statement(_primary_key)
+        ''
+      end
+    end
+  end
+end
data/lib/bulk_insert/statement_adapters/mysql_adapter.rb
ADDED
@@ -0,0 +1,24 @@
+require_relative 'base_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    class MySQLAdapter < BaseAdapter
+      def insert_ignore_statement
+        'IGNORE'
+      end
+
+      def on_conflict_statement(columns, _ignore, update_duplicates)
+        return '' unless update_duplicates
+
+        update_values = columns.map do |column|
+          "`#{column.name}`=VALUES(`#{column.name}`)"
+        end.join(', ')
+        ' ON DUPLICATE KEY UPDATE ' + update_values
+      end
+
+      def primary_key_return_statement(_primary_key)
+        ''
+      end
+    end
+  end
+end
data/lib/bulk_insert/statement_adapters/postgresql_adapter.rb
ADDED
@@ -0,0 +1,28 @@
+require_relative 'base_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    class PostgreSQLAdapter < BaseAdapter
+      def insert_ignore_statement
+        ''
+      end
+
+      def on_conflict_statement(columns, ignore, update_duplicates)
+        if ignore
+          ' ON CONFLICT DO NOTHING'
+        elsif update_duplicates
+          update_values = columns.map do |column|
+            "#{column.name}=EXCLUDED.#{column.name}"
+          end.join(', ')
+          ' ON CONFLICT(' + update_duplicates.join(', ') + ') DO UPDATE SET ' + update_values
+        else
+          ''
+        end
+      end
+
+      def primary_key_return_statement(primary_key)
+        " RETURNING #{primary_key}"
+      end
+    end
+  end
+end
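As a rough illustration of the SQL fragments the MySQL and PostgreSQL adapters above produce, here is a sketch using simple structs in place of ActiveRecord column metadata (only `#name` is needed); the column names are illustrative:

```ruby
require 'bulk_insert/statement_adapters/mysql_adapter'
require 'bulk_insert/statement_adapters/postgresql_adapter'

Column  = Struct.new(:name)
columns = [Column.new('title'), Column.new('author')]

mysql = BulkInsert::StatementAdapters::MySQLAdapter.new
mysql.insert_ignore_statement
# => "IGNORE"
mysql.on_conflict_statement(columns, false, true)
# => " ON DUPLICATE KEY UPDATE `title`=VALUES(`title`), `author`=VALUES(`author`)"

pg = BulkInsert::StatementAdapters::PostgreSQLAdapter.new
pg.on_conflict_statement(columns, true, false)
# => " ON CONFLICT DO NOTHING"
pg.on_conflict_statement(columns, false, %w[title])
# => " ON CONFLICT(title) DO UPDATE SET title=EXCLUDED.title, author=EXCLUDED.author"
pg.primary_key_return_statement('id')
# => " RETURNING id"
```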
data/lib/bulk_insert/statement_adapters/sqlite_adapter.rb
ADDED
@@ -0,0 +1,19 @@
+require_relative 'base_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    class SQLiteAdapter < BaseAdapter
+      def insert_ignore_statement
+        'OR IGNORE'
+      end
+
+      def on_conflict_statement(_columns, _ignore, _update_duplicates)
+        ''
+      end
+
+      def primary_key_return_statement(_primary_key)
+        ''
+      end
+    end
+  end
+end
data/lib/bulk_insert/version.rb
CHANGED
data/lib/bulk_insert/worker.rb
CHANGED
@@ -1,24 +1,38 @@
+require_relative 'statement_adapters'
+
 module BulkInsert
   class Worker
     attr_reader :connection
     attr_accessor :set_size
+    attr_accessor :before_save_callback
     attr_accessor :after_save_callback
+    attr_accessor :adapter_name
+    attr_reader :ignore, :update_duplicates, :result_sets
+
+    def initialize(connection, table_name, primary_key, column_names, set_size=500, ignore=false, update_duplicates=false, return_primary_keys=false)
+      @statement_adapter = StatementAdapters.adapter_for(connection)
 
-    def initialize(connection, table_name, column_names, set_size=500, ignore=false)
       @connection = connection
       @set_size = set_size
+
+      @adapter_name = connection.adapter_name
       # INSERT IGNORE only fails inserts with duplicate keys or unallowed nulls not the whole set of inserts
-      @ignore = ignore
+      @ignore = ignore
+      @update_duplicates = update_duplicates
+      @return_primary_keys = return_primary_keys
 
       columns = connection.columns(table_name)
       column_map = columns.inject({}) { |h, c| h.update(c.name => c) }
 
+      @primary_key = primary_key
       @columns = column_names.map { |name| column_map[name.to_s] }
       @table_name = connection.quote_table_name(table_name)
       @column_names = column_names.map { |name| connection.quote_column_name(name) }.join(",")
 
+      @before_save_callback = nil
       @after_save_callback = nil
 
+      @result_sets = []
       @set = []
     end
 
@@ -58,40 +72,65 @@ module BulkInsert
       self
     end
 
+    def before_save(&block)
+      @before_save_callback = block
+    end
+
     def after_save(&block)
       @after_save_callback = block
     end
 
     def save!
       if pending?
-        [old lines 67-81 not captured in this extract]
+        @before_save_callback.(@set) if @before_save_callback
+        execute_query
+        @after_save_callback.() if @after_save_callback
+        @set.clear
+      end
+
+      self
+    end
+
+    def execute_query
+      if query = compose_insert_query
+        result_set = @connection.exec_query(query)
+        @result_sets.push(result_set) if @return_primary_keys
+      end
+    end
+
+    def compose_insert_query
+      sql = insert_sql_statement
+      @now = Time.now
+      rows = []
+
+      @set.each do |row|
+        values = []
+        @columns.zip(row) do |column, value|
+          value = @now if value == :__timestamp_placeholder
+
+          if ActiveRecord::VERSION::STRING >= "5.0.0"
+            value = @connection.type_cast_from_column(column, value) if column
+            values << @connection.quote(value)
+          else
+            values << @connection.quote(value, column)
          end
-          rows << "(#{values.join(',')})"
        end
+        rows << "(#{values.join(',')})"
+      end
 
+      if !rows.empty?
        sql << rows.join(",")
-        [old lines 87-91, beginning "@", not captured in this extract]
+        sql << @statement_adapter.on_conflict_statement(@columns, ignore, update_duplicates)
+        sql << @statement_adapter.primary_key_return_statement(@primary_key) if @return_primary_keys
+        sql
+      else
+        false
      end
+    end
 
-    [old line 94 not captured in this extract]
+    def insert_sql_statement
+      insert_ignore = @ignore ? @statement_adapter.insert_ignore_statement : ''
+      "INSERT #{insert_ignore} INTO #{@table_name} (#{@column_names}) VALUES "
+    end
   end
 end
data/test/bulk_insert/worker_test.rb
CHANGED
@@ -1,3 +1,4 @@
+require 'minitest/mock'
 require 'test_helper'
 
 class BulkInsertWorkerTest < ActiveSupport::TestCase
@@ -5,6 +6,7 @@ class BulkInsertWorkerTest < ActiveSupport::TestCase
     @insert = BulkInsert::Worker.new(
       Testing.connection,
       Testing.table_name,
+      'id',
       %w(greeting age happy created_at updated_at color))
     @now = Time.now
   end
@@ -121,6 +123,53 @@ class BulkInsertWorkerTest < ActiveSupport::TestCase
     assert_equal true, hello.happy?
   end
 
+  test "save! does not add to result sets when not returning primary keys" do
+    @insert.add greeting: "first"
+    @insert.add greeting: "second"
+    @insert.save!
+
+    assert_equal 0, @insert.result_sets.count
+  end
+
+
+  test "save! adds to result sets when returning primary keys" do
+    worker = BulkInsert::Worker.new(
+      Testing.connection,
+      Testing.table_name,
+      'id',
+      %w(greeting age happy created_at updated_at color),
+      500,
+      false,
+      false,
+      true
+    )
+
+    assert_no_difference -> { worker.result_sets.count } do
+      worker.save!
+    end
+
+    worker.add greeting: "first"
+    worker.add greeting: "second"
+    worker.save!
+    assert_equal 1, worker.result_sets.count
+
+    worker.add greeting: "third"
+    worker.add greeting: "fourth"
+    worker.save!
+    assert_equal 2, worker.result_sets.count
+  end
+
+  test "initialized with empty result sets array" do
+    new_worker = BulkInsert::Worker.new(
+      Testing.connection,
+      Testing.table_name,
+      'id',
+      %w(greeting age happy created_at updated_at color)
+    )
+    assert_instance_of(Array, new_worker.result_sets)
+    assert_empty new_worker.result_sets
+  end
+
   test "save! calls the after_save handler" do
     x = 41
 
@@ -150,5 +199,261 @@ class BulkInsertWorkerTest < ActiveSupport::TestCase
 
     assert_equal "hello", @insert.after_save_callback.()
   end
-end
 
+  test "save! calls the before_save handler" do
+    x = 41
+
+    @insert.before_save do
+      x += 1
+    end
+
+    @insert.add ["Yo", 15, false, @now, @now]
+    @insert.add ["Hello", 25, true, @now, @now]
+    @insert.save!
+
+    assert_equal 42, x
+  end
+
+  test "before_save stores a block as a proc" do
+    @insert.before_save do
+      "hello"
+    end
+
+    assert_equal "hello", @insert.before_save_callback.()
+  end
+
+  test "before_save_callback can be set as a proc" do
+    @insert.before_save_callback = -> do
+      "hello"
+    end
+
+    assert_equal "hello", @insert.before_save_callback.()
+  end
+
+  test "before_save can manipulate the set" do
+    @insert.before_save do |set|
+      set.reject!{|row| row[0] == "Yo"}
+    end
+
+    @insert.add ["Yo", 15, false, @now, @now]
+    @insert.add ["Hello", 25, true, @now, @now]
+    @insert.save!
+
+    yo = Testing.find_by(greeting: 'Yo')
+    hello = Testing.find_by(greeting: 'Hello')
+
+    assert_nil yo
+    assert_not_nil hello
+  end
+
+  test "save! doesn't blow up if before_save emptying the set" do
+    @insert.before_save do |set|
+      set.clear
+    end
+
+    @insert.add ["Yo", 15, false, @now, @now]
+    @insert.add ["Hello", 25, true, @now, @now]
+    @insert.save!
+
+    yo = Testing.find_by(greeting: 'Yo')
+    hello = Testing.find_by(greeting: 'Hello')
+
+    assert_nil yo
+    assert_nil hello
+  end
+
+  test "adapter dependent default methods" do
+    assert_equal @insert.adapter_name, 'SQLite'
+    assert_equal @insert.insert_sql_statement, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES "
+
+    @insert.add ["Yo", 15, false, nil, nil]
+    assert_equal @insert.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
+  end
+
+  test "adapter dependent mysql methods" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'MySQL' do
+      mysql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true # ignore
+      )
+
+      assert_equal mysql_worker.adapter_name, 'MySQL'
+      assert_equal (mysql_worker.adapter_name == 'MySQL'), true
+      assert_equal mysql_worker.ignore, true
+      assert_equal ((mysql_worker.adapter_name == 'MySQL') & mysql_worker.ignore), true
+
+      mysql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal mysql_worker.compose_insert_query, "INSERT IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
+    end
+  end
+
+  test "adapter dependent mysql methods work for mysql2" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'Mysql2' do
+      mysql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true, # ignore
+        true) # update_duplicates
+
+      assert_equal mysql_worker.adapter_name, 'Mysql2'
+      assert mysql_worker.ignore
+
+      mysql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal mysql_worker.compose_insert_query, "INSERT IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON DUPLICATE KEY UPDATE `greeting`=VALUES(`greeting`), `age`=VALUES(`age`), `happy`=VALUES(`happy`), `created_at`=VALUES(`created_at`), `updated_at`=VALUES(`updated_at`), `color`=VALUES(`color`)"
+    end
+  end
+
+  test "adapter dependent Mysql2Spatial methods" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'Mysql2Spatial' do
+      mysql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true) # ignore
+
+      assert_equal mysql_worker.adapter_name, 'Mysql2Spatial'
+
+      mysql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal mysql_worker.compose_insert_query, "INSERT IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
+    end
+  end
+
+  test "adapter dependent postgresql methods" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'PostgreSQL' do
+      pgsql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true, # ignore
+        false, # update duplicates
+        true # return primary keys
+      )
+
+      pgsql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT DO NOTHING RETURNING id"
+    end
+  end
+
+  test "adapter dependent postgresql methods (no ignore, no update_duplicates)" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'PostgreSQL' do
+      pgsql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        false, # ignore
+        false, # update duplicates
+        true # return primary keys
+      )
+
+      pgsql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') RETURNING id"
+    end
+  end
+
+  test "adapter dependent postgresql methods (with update_duplicates)" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'PostgreSQL' do
+      pgsql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        false, # ignore
+        %w(greeting age happy), # update duplicates
+        true # return primary keys
+      )
+      pgsql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT(greeting, age, happy) DO UPDATE SET greeting=EXCLUDED.greeting, age=EXCLUDED.age, happy=EXCLUDED.happy, created_at=EXCLUDED.created_at, updated_at=EXCLUDED.updated_at, color=EXCLUDED.color RETURNING id"
+    end
+  end
+
+  test "adapter dependent PostGIS methods" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'PostGIS' do
+      pgsql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true, # ignore
+        false, # update duplicates
+        true # return primary keys
+      )
+      pgsql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT DO NOTHING RETURNING id"
+    end
+  end
+
+  test "adapter dependent sqlite3 methods (with lowercase adapter name)" do
+    sqlite_worker = BulkInsert::Worker.new(
+      Testing.connection,
+      Testing.table_name,
+      'id',
+      %w(greeting age happy created_at updated_at color),
+      500, # batch size
+      true) # ignore
+    sqlite_worker.adapter_name = 'sqlite3'
+    sqlite_worker.add ["Yo", 15, false, nil, nil]
+
+    assert_equal sqlite_worker.compose_insert_query, "INSERT OR IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
+  end
+
+  test "adapter dependent sqlite3 methods (with stylecase adapter name)" do
+    sqlite_worker = BulkInsert::Worker.new(
+      Testing.connection,
+      Testing.table_name,
+      'id',
+      %w(greeting age happy created_at updated_at color),
+      500, # batch size
+      true) # ignore
+    sqlite_worker.adapter_name = 'SQLite'
+    sqlite_worker.add ["Yo", 15, false, nil, nil]
+
+    assert_equal sqlite_worker.compose_insert_query, "INSERT OR IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
+  end
+
+  test "mysql adapter can update duplicates" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'MySQL' do
+      mysql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        false, # ignore
+        true # update_duplicates
+      )
+      mysql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal mysql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON DUPLICATE KEY UPDATE `greeting`=VALUES(`greeting`), `age`=VALUES(`age`), `happy`=VALUES(`happy`), `created_at`=VALUES(`created_at`), `updated_at`=VALUES(`updated_at`), `color`=VALUES(`color`)"
+    end
+  end
+end