bulk_insert 1.5.0 → 1.8.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/README.md +42 -1
- data/lib/bulk_insert.rb +2 -2
- data/lib/bulk_insert/statement_adapters.rb +22 -0
- data/lib/bulk_insert/statement_adapters/base_adapter.rb +21 -0
- data/lib/bulk_insert/statement_adapters/generic_adapter.rb +19 -0
- data/lib/bulk_insert/statement_adapters/mysql_adapter.rb +24 -0
- data/lib/bulk_insert/statement_adapters/postgresql_adapter.rb +28 -0
- data/lib/bulk_insert/statement_adapters/sqlite_adapter.rb +19 -0
- data/lib/bulk_insert/version.rb +2 -2
- data/lib/bulk_insert/worker.rb +26 -20
- data/test/bulk_insert/worker_test.rb +203 -25
- data/test/bulk_insert_test.rb +15 -1
- data/test/dummy/config/application.rb +1 -3
- data/test/dummy/db/test.sqlite3 +0 -0
- data/test/dummy/log/test.log +658 -4214
- metadata +45 -42
- data/test/dummy/db/development.sqlite3 +0 -0
- data/test/dummy/log/development.log +0 -17
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
-
-  metadata.gz:
-  data.tar.gz:
+SHA256:
+  metadata.gz: d23ef2d75aeafbefae857d874eff4f18183717fbcd1a63128a83ce407a0dfba1
+  data.tar.gz: 9bb240020a5559bf12ff4d91be847590142302fce59927707509bfdd20820bfc
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cadb63453d7036005118e8009007deb1e194a0284edbc8580009314fd02348b408bdfb2f640b6bf9eba6352a9eef77b696662463f24dbddaeedaf9dfb60613dd
+  data.tar.gz: a93e786c4d29bf7a2678d32e657a7c7ae3e0c92a91121b52ce375eaa25eaab7cd73a884624f1315ae0e83fa93bdcb9f70c4160ea26dc07530baee2b28d8ef944
data/README.md
CHANGED
@@ -104,7 +104,6 @@ empty the batch so that you can add more rows to it if you want. Note
 that all records saved together will have the same created_at/updated_at
 timestamp (unless one was explicitly set).
 
-
 ### Batch Set Size
 
 By default, the size of the insert is limited to 500 rows at a time.
@@ -149,6 +148,48 @@ Book.bulk_insert(*destination_columns, ignore: true) do |worker|
 end
 ```
 
+### Update Duplicates (MySQL, PostgreSQL)
+
+If you don't want to ignore duplicate rows but instead want to update them
+then you can use the _update_duplicates_ option. Set this option to true
+(MySQL) or list unique column names (PostgreSQL) and when a duplicate row
+is found the row will be updated with your new values.
+Default value for this option is false.
+
+```ruby
+destination_columns = [:title, :author]
+
+# Update duplicate rows (MySQL)
+Book.bulk_insert(*destination_columns, update_duplicates: true) do |worker|
+  worker.add(...)
+  worker.add(...)
+  # ...
+end
+
+# Update duplicate rows (PostgreSQL)
+Book.bulk_insert(*destination_columns, update_duplicates: %w[title]) do |worker|
+  worker.add(...)
+  # ...
+end
+```
+
+### Return Primary Keys (PostgreSQL, PostGIS)
+
+If you want the worker to store primary keys of inserted records, then you can
+use the _return_primary_keys_ option. The worker will store a `result_sets`
+array of `ActiveRecord::Result` objects. Each `ActiveRecord::Result` object
+will contain the primary keys of a batch of inserted records.
+
+```ruby
+worker = Book.bulk_insert(*destination_columns, return_primary_keys: true) do |worker|
+  worker.add(...)
+  worker.add(...)
+  # ...
+end
+
+worker.result_sets
+```
 
 ## License
 
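A hedged aside on the README addition above (editorial, not part of the package diff): since each entry in `worker.result_sets` is an `ActiveRecord::Result`, the inserted ids can be collected roughly as below, assuming the `worker` from the README example ran against PostgreSQL or PostGIS with `return_primary_keys: true`.

```ruby
# Sketch only: flatten each batch's ActiveRecord::Result into a flat list of ids.
inserted_ids = worker.result_sets.flat_map { |result| result.rows.flatten }
# e.g. => [1, 2, 3]
```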
data/lib/bulk_insert.rb
CHANGED
@@ -4,9 +4,9 @@ module BulkInsert
   extend ActiveSupport::Concern
 
   module ClassMethods
-    def bulk_insert(*columns, values: nil, set_size:500, ignore: false)
+    def bulk_insert(*columns, values: nil, set_size:500, ignore: false, update_duplicates: false, return_primary_keys: false)
       columns = default_bulk_columns if columns.empty?
-      worker = BulkInsert::Worker.new(connection, table_name, columns, set_size, ignore)
+      worker = BulkInsert::Worker.new(connection, table_name, primary_key, columns, set_size, ignore, update_duplicates, return_primary_keys)
 
       if values.present?
         transaction do
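As a brief, hedged sketch of the widened signature in use (the `Book` model and its columns are illustrative, not taken from the diff): the new keyword arguments ride along with the existing `values:` shortcut and are simply forwarded to the worker.

```ruby
# One-shot insert; update_duplicates and return_primary_keys are passed
# straight through to BulkInsert::Worker.new, as the hunk above shows.
Book.bulk_insert(:title, :author,
  values: [
    ["Eloquent Ruby", "Russ Olsen"],
    ["Practical Object-Oriented Design", "Sandi Metz"]
  ],
  update_duplicates: true # true for MySQL; pass unique column names for PostgreSQL
)
```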
data/lib/bulk_insert/statement_adapters.rb
ADDED
@@ -0,0 +1,22 @@
+require_relative 'statement_adapters/generic_adapter'
+require_relative 'statement_adapters/mysql_adapter'
+require_relative 'statement_adapters/postgresql_adapter'
+require_relative 'statement_adapters/sqlite_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    def adapter_for(connection)
+      case connection.adapter_name
+      when /^mysql/i
+        MySQLAdapter.new
+      when /\APost(?:greSQL|GIS)/i
+        PostgreSQLAdapter.new
+      when /\ASQLite/i
+        SQLiteAdapter.new
+      else
+        GenericAdapter.new
+      end
+    end
+    module_function :adapter_for
+  end
+end
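For orientation, a hedged usage sketch of the dispatch module added above (the `ActiveRecord::Base.connection` call is illustrative; only `StatementAdapters.adapter_for` itself comes from the diff):

```ruby
require 'bulk_insert/statement_adapters'

# Pick the statement adapter that matches the connection's adapter_name.
adapter = BulkInsert::StatementAdapters.adapter_for(ActiveRecord::Base.connection)
adapter.insert_ignore_statement # => "IGNORE" on MySQL, "OR IGNORE" on SQLite, "" otherwise
```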
data/lib/bulk_insert/statement_adapters/base_adapter.rb
ADDED
@@ -0,0 +1,21 @@
+module BulkInsert
+  module StatementAdapters
+    class BaseAdapter
+      def initialize
+        raise "You cannot initialize base adapter" if self.class == BaseAdapter
+      end
+
+      def insert_ignore_statement
+        raise "Not implemented"
+      end
+
+      def on_conflict_statement(_columns, _ignore, _update_duplicates)
+        raise "Not implemented"
+      end
+
+      def primary_key_return_statement(_primary_key)
+        raise "Not implemented"
+      end
+    end
+  end
+end
data/lib/bulk_insert/statement_adapters/generic_adapter.rb
ADDED
@@ -0,0 +1,19 @@
+require_relative 'base_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    class GenericAdapter < BaseAdapter
+      def insert_ignore_statement
+        ''
+      end
+
+      def on_conflict_statement(_columns, _ignore, _update_duplicates)
+        ''
+      end
+
+      def primary_key_return_statement(_primary_key)
+        ''
+      end
+    end
+  end
+end
data/lib/bulk_insert/statement_adapters/mysql_adapter.rb
ADDED
@@ -0,0 +1,24 @@
+require_relative 'base_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    class MySQLAdapter < BaseAdapter
+      def insert_ignore_statement
+        'IGNORE'
+      end
+
+      def on_conflict_statement(columns, _ignore, update_duplicates)
+        return '' unless update_duplicates
+
+        update_values = columns.map do |column|
+          "`#{column.name}`=VALUES(`#{column.name}`)"
+        end.join(', ')
+        ' ON DUPLICATE KEY UPDATE ' + update_values
+      end
+
+      def primary_key_return_statement(_primary_key)
+        ''
+      end
+    end
+  end
+end
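A hedged illustration of the fragment this adapter builds; the `Column` Struct below is a stand-in for ActiveRecord column objects (only `#name` is needed here) and is not part of the gem:

```ruby
require 'bulk_insert/statement_adapters/mysql_adapter'

Column  = Struct.new(:name) # hypothetical stand-in for ActiveRecord column objects
columns = [Column.new('title'), Column.new('author')]
adapter = BulkInsert::StatementAdapters::MySQLAdapter.new

adapter.on_conflict_statement(columns, false, true)
# => " ON DUPLICATE KEY UPDATE `title`=VALUES(`title`), `author`=VALUES(`author`)"
adapter.on_conflict_statement(columns, false, false)
# => "" (no-op unless update_duplicates is truthy)
```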
data/lib/bulk_insert/statement_adapters/postgresql_adapter.rb
ADDED
@@ -0,0 +1,28 @@
+require_relative 'base_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    class PostgreSQLAdapter < BaseAdapter
+      def insert_ignore_statement
+        ''
+      end
+
+      def on_conflict_statement(columns, ignore, update_duplicates)
+        if ignore
+          ' ON CONFLICT DO NOTHING'
+        elsif update_duplicates
+          update_values = columns.map do |column|
+            "#{column.name}=EXCLUDED.#{column.name}"
+          end.join(', ')
+          ' ON CONFLICT(' + update_duplicates.join(', ') + ') DO UPDATE SET ' + update_values
+        else
+          ''
+        end
+      end
+
+      def primary_key_return_statement(primary_key)
+        " RETURNING #{primary_key}"
+      end
+    end
+  end
+end
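Likewise, a hedged sketch of the PostgreSQL fragments, using the same hypothetical `Column` stand-in as above:

```ruby
require 'bulk_insert/statement_adapters/postgresql_adapter'

Column  = Struct.new(:name) # hypothetical stand-in for ActiveRecord column objects
columns = [Column.new('title'), Column.new('author')]
adapter = BulkInsert::StatementAdapters::PostgreSQLAdapter.new

adapter.on_conflict_statement(columns, true, false)
# => " ON CONFLICT DO NOTHING"
adapter.on_conflict_statement(columns, false, %w[title])
# => " ON CONFLICT(title) DO UPDATE SET title=EXCLUDED.title, author=EXCLUDED.author"
adapter.primary_key_return_statement('id')
# => " RETURNING id"
```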
data/lib/bulk_insert/statement_adapters/sqlite_adapter.rb
ADDED
@@ -0,0 +1,19 @@
+require_relative 'base_adapter'
+
+module BulkInsert
+  module StatementAdapters
+    class SQLiteAdapter < BaseAdapter
+      def insert_ignore_statement
+        'OR IGNORE'
+      end
+
+      def on_conflict_statement(_columns, _ignore, _update_duplicates)
+        ''
+      end
+
+      def primary_key_return_statement(_primary_key)
+        ''
+      end
+    end
+  end
+end
data/lib/bulk_insert/version.rb
CHANGED
data/lib/bulk_insert/worker.rb
CHANGED
@@ -1,3 +1,5 @@
+require_relative 'statement_adapters'
+
 module BulkInsert
   class Worker
     attr_reader :connection
@@ -5,19 +7,24 @@ module BulkInsert
     attr_accessor :before_save_callback
     attr_accessor :after_save_callback
     attr_accessor :adapter_name
-    attr_reader :ignore
+    attr_reader :ignore, :update_duplicates, :result_sets
+
+    def initialize(connection, table_name, primary_key, column_names, set_size=500, ignore=false, update_duplicates=false, return_primary_keys=false)
+      @statement_adapter = StatementAdapters.adapter_for(connection)
 
-    def initialize(connection, table_name, column_names, set_size=500, ignore=false)
       @connection = connection
       @set_size = set_size
 
       @adapter_name = connection.adapter_name
       # INSERT IGNORE only fails inserts with duplicate keys or unallowed nulls not the whole set of inserts
       @ignore = ignore
+      @update_duplicates = update_duplicates
+      @return_primary_keys = return_primary_keys
 
       columns = connection.columns(table_name)
       column_map = columns.inject({}) { |h, c| h.update(c.name => c) }
 
+      @primary_key = primary_key
      @columns = column_names.map { |name| column_map[name.to_s] }
      @table_name = connection.quote_table_name(table_name)
      @column_names = column_names.map { |name| connection.quote_column_name(name) }.join(",")
@@ -25,6 +32,7 @@ module BulkInsert
       @before_save_callback = nil
       @after_save_callback = nil
 
+      @result_sets = []
       @set = []
     end
 
@@ -75,7 +83,7 @@ module BulkInsert
     def save!
       if pending?
         @before_save_callback.(@set) if @before_save_callback
-
+        execute_query
         @after_save_callback.() if @after_save_callback
         @set.clear
       end
@@ -83,9 +91,16 @@ module BulkInsert
       self
     end
 
+    def execute_query
+      if query = compose_insert_query
+        result_set = @connection.exec_query(query)
+        @result_sets.push(result_set) if @return_primary_keys
+      end
+    end
+
     def compose_insert_query
       sql = insert_sql_statement
-      @now = Time.now
+      @now = Time.now
       rows = []
 
       @set.each do |row|
@@ -94,7 +109,10 @@ module BulkInsert
           value = @now if value == :__timestamp_placeholder
 
           if ActiveRecord::VERSION::STRING >= "5.0.0"
-
+            if column
+              type = @connection.lookup_cast_type_from_column(column)
+              value = type.serialize(value)
+            end
             values << @connection.quote(value)
           else
             values << @connection.quote(value, column)
@@ -105,7 +123,8 @@ module BulkInsert
 
       if !rows.empty?
         sql << rows.join(",")
-        sql << on_conflict_statement
+        sql << @statement_adapter.on_conflict_statement(@columns, ignore, update_duplicates)
+        sql << @statement_adapter.primary_key_return_statement(@primary_key) if @return_primary_keys
         sql
       else
         false
@@ -113,21 +132,8 @@ module BulkInsert
     end
 
     def insert_sql_statement
-      insert_ignore =
-        if adapter_name == "MySQL"
-          'IGNORE'
-        elsif adapter_name.match(/sqlite.*/i)
-          'OR IGNORE'
-        else
-          '' # Not supported
-        end
-      end
-
+      insert_ignore = @ignore ? @statement_adapter.insert_ignore_statement : ''
       "INSERT #{insert_ignore} INTO #{@table_name} (#{@column_names}) VALUES "
     end
-
-    def on_conflict_statement
-      (adapter_name == 'PostgreSQL' && ignore ) ? ' ON CONFLICT DO NOTHING' : ''
-    end
   end
 end
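To make the widened constructor easier to follow, here is a hedged sketch of the new positional argument order and the `result_sets` flow (the `Book` model is illustrative; the order is taken from the `initialize` signature above):

```ruby
worker = BulkInsert::Worker.new(
  Book.connection,
  Book.table_name,
  Book.primary_key, # newly threaded through for the RETURNING clause
  %w(title author),
  500,              # set_size
  false,            # ignore
  false,            # update_duplicates
  true              # return_primary_keys
)

worker.add(title: "Eloquent Ruby", author: "Russ Olsen")
worker.save!       # save! -> execute_query -> exec_query; the ActiveRecord::Result is kept
worker.result_sets # => one ActiveRecord::Result per saved batch (ids on PostgreSQL/PostGIS)
```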
data/test/bulk_insert/worker_test.rb
CHANGED
@@ -1,3 +1,4 @@
+require 'minitest/mock'
 require 'test_helper'
 
 class BulkInsertWorkerTest < ActiveSupport::TestCase
@@ -5,6 +6,7 @@ class BulkInsertWorkerTest < ActiveSupport::TestCase
     @insert = BulkInsert::Worker.new(
       Testing.connection,
       Testing.table_name,
+      'id',
       %w(greeting age happy created_at updated_at color))
     @now = Time.now
   end
@@ -121,6 +123,53 @@ class BulkInsertWorkerTest < ActiveSupport::TestCase
     assert_equal true, hello.happy?
   end
 
+  test "save! does not add to result sets when not returning primary keys" do
+    @insert.add greeting: "first"
+    @insert.add greeting: "second"
+    @insert.save!
+
+    assert_equal 0, @insert.result_sets.count
+  end
+
+
+  test "save! adds to result sets when returning primary keys" do
+    worker = BulkInsert::Worker.new(
+      Testing.connection,
+      Testing.table_name,
+      'id',
+      %w(greeting age happy created_at updated_at color),
+      500,
+      false,
+      false,
+      true
+    )
+
+    assert_no_difference -> { worker.result_sets.count } do
+      worker.save!
+    end
+
+    worker.add greeting: "first"
+    worker.add greeting: "second"
+    worker.save!
+    assert_equal 1, worker.result_sets.count
+
+    worker.add greeting: "third"
+    worker.add greeting: "fourth"
+    worker.save!
+    assert_equal 2, worker.result_sets.count
+  end
+
+  test "initialized with empty result sets array" do
+    new_worker = BulkInsert::Worker.new(
+      Testing.connection,
+      Testing.table_name,
+      'id',
+      %w(greeting age happy created_at updated_at color)
+    )
+    assert_instance_of(Array, new_worker.result_sets)
+    assert_empty new_worker.result_sets
+  end
+
   test "save! calls the after_save handler" do
     x = 41
 
@@ -218,64 +267,193 @@ class BulkInsertWorkerTest < ActiveSupport::TestCase
     assert_equal @insert.insert_sql_statement, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES "
 
     @insert.add ["Yo", 15, false, nil, nil]
-    assert_equal @insert.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,
+    assert_equal @insert.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
   end
 
   test "adapter dependent mysql methods" do
-
-
-
-
-
-
-
+    connection = Testing.connection
+    connection.stub :adapter_name, 'MySQL' do
+      mysql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true # ignore
+      )
+
+      assert_equal mysql_worker.adapter_name, 'MySQL'
+      assert_equal (mysql_worker.adapter_name == 'MySQL'), true
+      assert_equal mysql_worker.ignore, true
+      assert_equal ((mysql_worker.adapter_name == 'MySQL') & mysql_worker.ignore), true
+
+      mysql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal mysql_worker.compose_insert_query, "INSERT IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
+    end
+  end
 
-
-
-
-
+  test "adapter dependent mysql methods work for mysql2" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'Mysql2' do
+      mysql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true, # ignore
+        true) # update_duplicates
 
-
+      assert_equal mysql_worker.adapter_name, 'Mysql2'
+      assert mysql_worker.ignore
 
-
+      mysql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal mysql_worker.compose_insert_query, "INSERT IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON DUPLICATE KEY UPDATE `greeting`=VALUES(`greeting`), `age`=VALUES(`age`), `happy`=VALUES(`happy`), `created_at`=VALUES(`created_at`), `updated_at`=VALUES(`updated_at`), `color`=VALUES(`color`)"
+    end
+  end
+
+  test "adapter dependent Mysql2Spatial methods" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'Mysql2Spatial' do
+      mysql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true) # ignore
+
+      assert_equal mysql_worker.adapter_name, 'Mysql2Spatial'
+
+      mysql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal mysql_worker.compose_insert_query, "INSERT IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
+    end
   end
 
   test "adapter dependent postgresql methods" do
-
-
-
-
-
-
-
-
+    connection = Testing.connection
+    connection.stub :adapter_name, 'PostgreSQL' do
+      pgsql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true, # ignore
+        false, # update duplicates
+        true # return primary keys
+      )
+
+      pgsql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT DO NOTHING RETURNING id"
+    end
+  end
+
+  test "adapter dependent postgresql methods (no ignore, no update_duplicates)" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'PostgreSQL' do
+      pgsql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        false, # ignore
+        false, # update duplicates
+        true # return primary keys
+      )
+
+      pgsql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') RETURNING id"
+    end
+  end
+
+  test "adapter dependent postgresql methods (with update_duplicates)" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'PostgreSQL' do
+      pgsql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        false, # ignore
+        %w(greeting age happy), # update duplicates
+        true # return primary keys
+      )
+      pgsql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT(greeting, age, happy) DO UPDATE SET greeting=EXCLUDED.greeting, age=EXCLUDED.age, happy=EXCLUDED.happy, created_at=EXCLUDED.created_at, updated_at=EXCLUDED.updated_at, color=EXCLUDED.color RETURNING id"
+    end
+  end
 
-
+  test "adapter dependent PostGIS methods" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'PostGIS' do
+      pgsql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        true, # ignore
+        false, # update duplicates
+        true # return primary keys
+      )
+      pgsql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT DO NOTHING RETURNING id"
+    end
   end
 
   test "adapter dependent sqlite3 methods (with lowercase adapter name)" do
     sqlite_worker = BulkInsert::Worker.new(
       Testing.connection,
       Testing.table_name,
+      'id',
       %w(greeting age happy created_at updated_at color),
       500, # batch size
       true) # ignore
     sqlite_worker.adapter_name = 'sqlite3'
     sqlite_worker.add ["Yo", 15, false, nil, nil]
 
-    assert_equal sqlite_worker.compose_insert_query, "INSERT OR IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,
+    assert_equal sqlite_worker.compose_insert_query, "INSERT OR IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
   end
 
   test "adapter dependent sqlite3 methods (with stylecase adapter name)" do
     sqlite_worker = BulkInsert::Worker.new(
       Testing.connection,
       Testing.table_name,
+      'id',
       %w(greeting age happy created_at updated_at color),
       500, # batch size
      true) # ignore
     sqlite_worker.adapter_name = 'SQLite'
     sqlite_worker.add ["Yo", 15, false, nil, nil]
 
-    assert_equal sqlite_worker.compose_insert_query, "INSERT OR IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,
+    assert_equal sqlite_worker.compose_insert_query, "INSERT OR IGNORE INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse')"
+  end
+
+  test "mysql adapter can update duplicates" do
+    connection = Testing.connection
+    connection.stub :adapter_name, 'MySQL' do
+      mysql_worker = BulkInsert::Worker.new(
+        connection,
+        Testing.table_name,
+        'id',
+        %w(greeting age happy created_at updated_at color),
+        500, # batch size
+        false, # ignore
+        true # update_duplicates
+      )
+      mysql_worker.add ["Yo", 15, false, nil, nil]
+
+      assert_equal mysql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON DUPLICATE KEY UPDATE `greeting`=VALUES(`greeting`), `age`=VALUES(`age`), `happy`=VALUES(`happy`), `created_at`=VALUES(`created_at`), `updated_at`=VALUES(`updated_at`), `color`=VALUES(`color`)"
+    end
   end
 end