activerecord-import 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +32 -0
- data/.rubocop.yml +49 -0
- data/.rubocop_todo.yml +36 -0
- data/.travis.yml +74 -0
- data/Brewfile +3 -0
- data/CHANGELOG.md +430 -0
- data/Gemfile +59 -0
- data/LICENSE +56 -0
- data/README.markdown +619 -0
- data/Rakefile +68 -0
- data/activerecord-import.gemspec +23 -0
- data/benchmarks/README +32 -0
- data/benchmarks/benchmark.rb +68 -0
- data/benchmarks/lib/base.rb +138 -0
- data/benchmarks/lib/cli_parser.rb +107 -0
- data/benchmarks/lib/float.rb +15 -0
- data/benchmarks/lib/mysql2_benchmark.rb +19 -0
- data/benchmarks/lib/output_to_csv.rb +19 -0
- data/benchmarks/lib/output_to_html.rb +64 -0
- data/benchmarks/models/test_innodb.rb +3 -0
- data/benchmarks/models/test_memory.rb +3 -0
- data/benchmarks/models/test_myisam.rb +3 -0
- data/benchmarks/schema/mysql_schema.rb +16 -0
- data/gemfiles/3.2.gemfile +2 -0
- data/gemfiles/4.0.gemfile +2 -0
- data/gemfiles/4.1.gemfile +2 -0
- data/gemfiles/4.2.gemfile +2 -0
- data/gemfiles/5.0.gemfile +2 -0
- data/gemfiles/5.1.gemfile +2 -0
- data/gemfiles/5.2.gemfile +2 -0
- data/gemfiles/6.0.gemfile +1 -0
- data/gemfiles/6.1.gemfile +1 -0
- data/lib/activerecord-import.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/abstract_adapter.rb +9 -0
- data/lib/activerecord-import/active_record/adapters/jdbcmysql_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/jdbcpostgresql_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/jdbcsqlite3_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/mysql2_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/postgresql_adapter.rb +6 -0
- data/lib/activerecord-import/active_record/adapters/seamless_database_pool_adapter.rb +7 -0
- data/lib/activerecord-import/active_record/adapters/sqlite3_adapter.rb +6 -0
- data/lib/activerecord-import/adapters/abstract_adapter.rb +66 -0
- data/lib/activerecord-import/adapters/em_mysql2_adapter.rb +5 -0
- data/lib/activerecord-import/adapters/mysql2_adapter.rb +5 -0
- data/lib/activerecord-import/adapters/mysql_adapter.rb +129 -0
- data/lib/activerecord-import/adapters/postgresql_adapter.rb +217 -0
- data/lib/activerecord-import/adapters/sqlite3_adapter.rb +180 -0
- data/lib/activerecord-import/base.rb +43 -0
- data/lib/activerecord-import/import.rb +1059 -0
- data/lib/activerecord-import/mysql2.rb +7 -0
- data/lib/activerecord-import/postgresql.rb +7 -0
- data/lib/activerecord-import/sqlite3.rb +7 -0
- data/lib/activerecord-import/synchronize.rb +66 -0
- data/lib/activerecord-import/value_sets_parser.rb +77 -0
- data/lib/activerecord-import/version.rb +5 -0
- data/test/adapters/jdbcmysql.rb +1 -0
- data/test/adapters/jdbcpostgresql.rb +1 -0
- data/test/adapters/jdbcsqlite3.rb +1 -0
- data/test/adapters/makara_postgis.rb +1 -0
- data/test/adapters/mysql2.rb +1 -0
- data/test/adapters/mysql2_makara.rb +1 -0
- data/test/adapters/mysql2spatial.rb +1 -0
- data/test/adapters/postgis.rb +1 -0
- data/test/adapters/postgresql.rb +1 -0
- data/test/adapters/postgresql_makara.rb +1 -0
- data/test/adapters/seamless_database_pool.rb +1 -0
- data/test/adapters/spatialite.rb +1 -0
- data/test/adapters/sqlite3.rb +1 -0
- data/test/database.yml.sample +52 -0
- data/test/import_test.rb +903 -0
- data/test/jdbcmysql/import_test.rb +5 -0
- data/test/jdbcpostgresql/import_test.rb +4 -0
- data/test/jdbcsqlite3/import_test.rb +4 -0
- data/test/makara_postgis/import_test.rb +8 -0
- data/test/models/account.rb +3 -0
- data/test/models/alarm.rb +2 -0
- data/test/models/bike_maker.rb +7 -0
- data/test/models/book.rb +9 -0
- data/test/models/car.rb +3 -0
- data/test/models/chapter.rb +4 -0
- data/test/models/dictionary.rb +4 -0
- data/test/models/discount.rb +3 -0
- data/test/models/end_note.rb +4 -0
- data/test/models/group.rb +3 -0
- data/test/models/promotion.rb +3 -0
- data/test/models/question.rb +3 -0
- data/test/models/rule.rb +3 -0
- data/test/models/tag.rb +4 -0
- data/test/models/topic.rb +23 -0
- data/test/models/user.rb +3 -0
- data/test/models/user_token.rb +4 -0
- data/test/models/vendor.rb +7 -0
- data/test/models/widget.rb +24 -0
- data/test/mysql2/import_test.rb +5 -0
- data/test/mysql2_makara/import_test.rb +6 -0
- data/test/mysqlspatial2/import_test.rb +6 -0
- data/test/postgis/import_test.rb +8 -0
- data/test/postgresql/import_test.rb +4 -0
- data/test/schema/generic_schema.rb +194 -0
- data/test/schema/jdbcpostgresql_schema.rb +1 -0
- data/test/schema/mysql2_schema.rb +19 -0
- data/test/schema/postgis_schema.rb +1 -0
- data/test/schema/postgresql_schema.rb +47 -0
- data/test/schema/sqlite3_schema.rb +13 -0
- data/test/schema/version.rb +10 -0
- data/test/sqlite3/import_test.rb +4 -0
- data/test/support/active_support/test_case_extensions.rb +75 -0
- data/test/support/assertions.rb +73 -0
- data/test/support/factories.rb +64 -0
- data/test/support/generate.rb +29 -0
- data/test/support/mysql/import_examples.rb +98 -0
- data/test/support/postgresql/import_examples.rb +563 -0
- data/test/support/shared_examples/on_duplicate_key_ignore.rb +43 -0
- data/test/support/shared_examples/on_duplicate_key_update.rb +368 -0
- data/test/support/shared_examples/recursive_import.rb +216 -0
- data/test/support/sqlite3/import_examples.rb +231 -0
- data/test/synchronize_test.rb +41 -0
- data/test/test_helper.rb +75 -0
- data/test/travis/database.yml +66 -0
- data/test/value_sets_bytes_parser_test.rb +104 -0
- data/test/value_sets_records_parser_test.rb +32 -0
- metadata +259 -0
|
# Deprecated per-adapter entry point kept for backwards compatibility:
# emit a deprecation notice, then delegate to the autorequire entry point.
deprecation_message = <<-MSG
[DEPRECATION] loading activerecord-import via 'require "activerecord-import/<adapter-name>"'
is deprecated. Update to autorequire using 'require "activerecord-import"'. See
http://github.com/zdennis/activerecord-import/wiki/Requiring for more information
MSG

warn deprecation_message

require "activerecord-import"
# Deprecated per-adapter entry point kept for backwards compatibility:
# emit a deprecation notice, then delegate to the autorequire entry point.
deprecation_message = <<-MSG
[DEPRECATION] loading activerecord-import via 'require "activerecord-import/<adapter-name>"'
is deprecated. Update to autorequire using 'require "activerecord-import"'. See
http://github.com/zdennis/activerecord-import/wiki/Requiring for more information
MSG

warn deprecation_message

require "activerecord-import"
# Deprecated per-adapter entry point kept for backwards compatibility:
# emit a deprecation notice, then delegate to the autorequire entry point.
deprecation_message = <<-MSG
[DEPRECATION] loading activerecord-import via 'require "activerecord-import/<adapter-name>"'
is deprecated. Update to autorequire using 'require "activerecord-import"'. See
http://github.com/zdennis/activerecord-import/wiki/Requiring for more information
MSG

warn deprecation_message

require "activerecord-import"
module ActiveRecord # :nodoc:
  class Base # :nodoc:
    # Synchronizes the passed-in ActiveRecord instances with data from the
    # database. This behaves like calling +reload+ on each instance, but it
    # issues a single query for the whole collection and copies the fresh
    # attributes into the existing objects in place.
    #
    # == Examples
    #   # Synchronizing existing models by matching on the primary key field
    #   posts = Post.where(author: "Zach").first
    #   # <.. out of system changes occur to change author name from Zach to Zachary ..>
    #   Post.synchronize posts
    #   posts.first.author # => "Zachary" instead of Zach
    #
    #   # Synchronizing using custom key fields
    #   posts = Post.where(author: "Zach").first
    #   # <.. out of system changes occur to change the address of author 'Zach' to 1245 Foo Ln ..>
    #   Post.synchronize posts, [:name] # queries on the :name column and not the :id column
    #   posts.first.address # => "1245 Foo Ln" instead of whatever it was
    def self.synchronize(instances, keys = [primary_key])
      return if instances.empty?

      # One IN-style condition per key, covering every passed-in instance.
      conditions = keys.each_with_object({}) do |key, hash|
        hash[key] = instances.map(&key.to_sym)
      end
      order = keys.map { |key| "#{key} ASC" }.join(",")

      model_class = instances.first.class

      fresh_instances = model_class.unscoped.where(conditions).order(order)
      instances.each do |instance|
        # NOTE: detect (Enumerable), not Relation#find — we match in memory
        # on all key columns, not by primary-key lookup.
        fresh_instance = fresh_instances.detect do |candidate|
          keys.all? { |key| candidate.send(key) == instance.send(key) }
        end

        next unless fresh_instance

        instance.send :clear_association_cache
        instance.send :clear_aggregation_cache if instance.respond_to?(:clear_aggregation_cache, true)
        instance.instance_variable_set :@attributes, fresh_instance.instance_variable_get(:@attributes)

        if instance.respond_to?(:clear_changes_information)
          instance.clear_changes_information # Rails 4.2 and higher
        else
          instance.instance_variable_set :@attributes_cache, {} # Rails 4.0, 4.1
          instance.changed_attributes.clear # Rails 3.2
          instance.previous_changes.clear
        end

        # The instance now mirrors the record in the database, so make sure
        # instance.persisted? reports true.
        instance.instance_variable_set '@new_record', false
        instance.instance_variable_set '@destroyed', false
      end
    end

    # Instance-level convenience wrapper around Base.synchronize.
    def synchronize(instances, key = [ActiveRecord::Base.primary_key])
      self.class.synchronize(instances, key)
    end
  end
end
module ActiveRecord::Import
  # Raised when a single value set is larger than the byte budget allowed
  # for one insert statement. +size+ carries the offending byte count.
  class ValueSetTooLargeError < StandardError
    attr_reader :size

    def initialize(msg = "Value set exceeds max size", size = 0)
      @size = size
      super(msg)
    end
  end

  # Splits an array of pre-serialized value strings into groups whose
  # combined byte size — including reserved statement overhead and the
  # commas joining them — stays within a byte budget.
  class ValueSetsBytesParser
    attr_reader :reserved_bytes, :max_bytes, :values

    # Convenience wrapper: build a parser and run it in one call.
    def self.parse(values, options)
      new(values, options).parse
    end

    def initialize(values, options)
      @values = values
      @reserved_bytes = options[:reserved_bytes] || 0
      # When no explicit budget is given, default to one that admits
      # every value in a single set.
      @max_bytes = options.fetch(:max_bytes) { default_max_bytes }
    end

    # Returns an array of value-set arrays, each fitting within max_bytes.
    # Raises ValueSetTooLargeError when a single value cannot fit on its own.
    def parse
      value_sets = []
      current_set = []
      current_size = 0
      values.each_with_index do |val, index|
        comma_bytes = current_set.size
        insert_size = reserved_bytes + val.bytesize

        if insert_size > max_bytes
          raise ValueSetTooLargeError.new("#{insert_size} bytes exceeds the max allowed for an insert [#{@max_bytes}]", insert_size)
        end

        bytes_thus_far = reserved_bytes + current_size + val.bytesize + comma_bytes
        if bytes_thus_far <= max_bytes
          current_size += val.bytesize
          current_set << val
        else
          value_sets << current_set
          current_set = [val]
          current_size = val.bytesize
        end

        # on the last iteration, flush whatever remains into value_sets
        value_sets << current_set if index == (values.size - 1)
      end

      value_sets
    end

    private

    # Budget that fits every value in one set: all value bytes plus one
    # comma between each pair, plus the reserved statement overhead.
    def default_max_bytes
      values_in_bytes = values.sum(&:bytesize)
      comma_separated_bytes = values.size - 1
      reserved_bytes + values_in_bytes + comma_separated_bytes
    end
  end

  # Splits value sets into fixed-size groups of at most max_records entries
  # (relies on ActiveSupport's Array#in_groups_of).
  class ValueSetsRecordsParser
    attr_reader :max_records, :values

    # Convenience wrapper: build a parser and run it in one call.
    def self.parse(values, options)
      new(values, options).parse
    end

    def initialize(values, options)
      @values = values
      @max_records = options[:max_records]
    end

    def parse
      @values.in_groups_of(max_records, false)
    end
  end
end
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the jdbcmysql adapter for the test suite.
ENV['ARE_DB'] = 'jdbcmysql'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the jdbcpostgresql adapter for the test suite.
ENV['ARE_DB'] = 'jdbcpostgresql'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the jdbcsqlite3 adapter for the test suite.
ENV['ARE_DB'] = 'jdbcsqlite3'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# makara_postgis runs against the plain postgis configuration.
ENV['ARE_DB'] = 'postgis'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the mysql2 adapter for the test suite.
ENV['ARE_DB'] = 'mysql2'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the mysql2_makara adapter for the test suite.
ENV['ARE_DB'] = 'mysql2_makara'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the mysql2spatial adapter for the test suite.
ENV['ARE_DB'] = 'mysql2spatial'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the postgis adapter for the test suite.
ENV['ARE_DB'] = 'postgis'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the postgresql adapter for the test suite.
ENV['ARE_DB'] = 'postgresql'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# postgresql_makara runs against the plain postgresql configuration.
ENV['ARE_DB'] = 'postgresql'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the seamless_database_pool adapter for the test suite.
ENV['ARE_DB'] = 'seamless_database_pool'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the spatialite adapter for the test suite.
ENV['ARE_DB'] = 'spatialite'
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
# Select the sqlite3 adapter for the test suite.
ENV['ARE_DB'] = 'sqlite3'
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
common: &common
|
|
2
|
+
username: root
|
|
3
|
+
password:
|
|
4
|
+
encoding: utf8
|
|
5
|
+
host: localhost
|
|
6
|
+
database: activerecord_import_test
|
|
7
|
+
|
|
8
|
+
mysql2: &mysql2
|
|
9
|
+
<<: *common
|
|
10
|
+
adapter: mysql2
|
|
11
|
+
|
|
12
|
+
mysql2spatial:
|
|
13
|
+
<<: *mysql2
|
|
14
|
+
|
|
15
|
+
mysql2_makara:
|
|
16
|
+
<<: *mysql2
|
|
17
|
+
|
|
18
|
+
postgresql: &postgresql
|
|
19
|
+
<<: *common
|
|
20
|
+
username: postgres
|
|
21
|
+
adapter: postgresql
|
|
22
|
+
min_messages: warning
|
|
23
|
+
|
|
24
|
+
postgresql_makara:
|
|
25
|
+
<<: *postgresql
|
|
26
|
+
|
|
27
|
+
postgis:
|
|
28
|
+
<<: *postgresql
|
|
29
|
+
|
|
30
|
+
oracle:
|
|
31
|
+
<<: *common
|
|
32
|
+
adapter: oracle
|
|
33
|
+
min_messages: debug
|
|
34
|
+
|
|
35
|
+
seamless_database_pool:
|
|
36
|
+
<<: *common
|
|
37
|
+
adapter: seamless_database_pool
|
|
38
|
+
prepared_statements: false
|
|
39
|
+
pool_adapter: mysql2
|
|
40
|
+
master:
|
|
41
|
+
host: localhost
|
|
42
|
+
|
|
43
|
+
sqlite:
|
|
44
|
+
adapter: sqlite
|
|
45
|
+
dbfile: test.db
|
|
46
|
+
|
|
47
|
+
sqlite3: &sqlite3
|
|
48
|
+
adapter: sqlite3
|
|
49
|
+
database: test.db
|
|
50
|
+
|
|
51
|
+
spatialite:
|
|
52
|
+
<<: *sqlite3
|
data/test/import_test.rb
ADDED
|
@@ -0,0 +1,903 @@
|
|
|
1
|
+
require File.expand_path('../test_helper', __FILE__)
|
|
2
|
+
|
|
3
|
+
describe "#import" do
|
|
4
|
+
it "should return the number of inserts performed" do
|
|
5
|
+
# see ActiveRecord::ConnectionAdapters::AbstractAdapter test for more specifics
|
|
6
|
+
assert_difference "Topic.count", +10 do
|
|
7
|
+
result = Topic.import Build(3, :topics)
|
|
8
|
+
assert result.num_inserts > 0
|
|
9
|
+
|
|
10
|
+
result = Topic.import Build(7, :topics)
|
|
11
|
+
assert result.num_inserts > 0
|
|
12
|
+
end
|
|
13
|
+
end
|
|
14
|
+
|
|
15
|
+
it "warns you that you're using the library wrong" do
|
|
16
|
+
error = assert_raise(ArgumentError) { Topic.import %w(title author_name), ['Author #1', 'Book #1', 0] }
|
|
17
|
+
assert_equal error.message, "Last argument should be a two dimensional array '[[]]'. First element in array was a String"
|
|
18
|
+
end
|
|
19
|
+
|
|
20
|
+
it "warns you that you're passing more data than you ought to" do
|
|
21
|
+
error = assert_raise(ArgumentError) { Topic.import %w(title author_name), [['Author #1', 'Book #1', 0]] }
|
|
22
|
+
assert_equal error.message, "Number of values (8) exceeds number of columns (7)"
|
|
23
|
+
end
|
|
24
|
+
|
|
25
|
+
it "should not produce an error when importing empty arrays" do
|
|
26
|
+
assert_nothing_raised do
|
|
27
|
+
Topic.import []
|
|
28
|
+
Topic.import %w(title author_name), []
|
|
29
|
+
end
|
|
30
|
+
end
|
|
31
|
+
|
|
32
|
+
describe "argument safety" do
|
|
33
|
+
it "should not modify the passed in columns array" do
|
|
34
|
+
assert_nothing_raised do
|
|
35
|
+
columns = %w(title author_name).freeze
|
|
36
|
+
Topic.import columns, [%w(foo bar)]
|
|
37
|
+
end
|
|
38
|
+
end
|
|
39
|
+
|
|
40
|
+
it "should not modify the passed in values array" do
|
|
41
|
+
assert_nothing_raised do
|
|
42
|
+
record = %w(foo bar).freeze
|
|
43
|
+
values = [record].freeze
|
|
44
|
+
Topic.import %w(title author_name), values
|
|
45
|
+
end
|
|
46
|
+
end
|
|
47
|
+
end
|
|
48
|
+
|
|
49
|
+
describe "with non-default ActiveRecord models" do
|
|
50
|
+
context "that have a non-standard primary key (that is no sequence)" do
|
|
51
|
+
it "should import models successfully" do
|
|
52
|
+
assert_difference "Widget.count", +3 do
|
|
53
|
+
Widget.import Build(3, :widgets)
|
|
54
|
+
end
|
|
55
|
+
end
|
|
56
|
+
|
|
57
|
+
context "with uppercase letters" do
|
|
58
|
+
it "should import models successfully" do
|
|
59
|
+
assert_difference "Car.count", +3 do
|
|
60
|
+
Car.import Build(3, :cars)
|
|
61
|
+
end
|
|
62
|
+
end
|
|
63
|
+
end
|
|
64
|
+
end
|
|
65
|
+
|
|
66
|
+
context "that have no primary key" do
|
|
67
|
+
it "should import models successfully" do
|
|
68
|
+
assert_difference "Rule.count", +3 do
|
|
69
|
+
Rule.import Build(3, :rules)
|
|
70
|
+
end
|
|
71
|
+
end
|
|
72
|
+
end
|
|
73
|
+
end
|
|
74
|
+
|
|
75
|
+
describe "with an array of hashes" do
|
|
76
|
+
let(:columns) { [:title, :author_name] }
|
|
77
|
+
let(:values) { [{ title: "LDAP", author_name: "Jerry Carter", author_email_address: "jcarter@test.com" }, { title: "Rails Recipes", author_name: "Chad Fowler", author_email_address: "cfowler@test.com" }] }
|
|
78
|
+
|
|
79
|
+
it "should import hash data successfully" do
|
|
80
|
+
assert_difference "Topic.count", +2 do
|
|
81
|
+
Topic.import values, validate: false
|
|
82
|
+
end
|
|
83
|
+
end
|
|
84
|
+
|
|
85
|
+
it "should import specified hash data successfully" do
|
|
86
|
+
assert_difference "Topic.count", +2 do
|
|
87
|
+
Topic.import columns, values, validate: false
|
|
88
|
+
end
|
|
89
|
+
|
|
90
|
+
Topic.all.each do |t|
|
|
91
|
+
assert_nil t.author_email_address
|
|
92
|
+
end
|
|
93
|
+
end
|
|
94
|
+
|
|
95
|
+
context "with extra keys" do
|
|
96
|
+
let(:values) do
|
|
97
|
+
[
|
|
98
|
+
{ title: "LDAP", author_name: "Jerry Carter" },
|
|
99
|
+
{ title: "Rails Recipes", author_name: "Chad Fowler", author_email_address: "cfowler@test.com" } # author_email_address is unknown
|
|
100
|
+
]
|
|
101
|
+
end
|
|
102
|
+
|
|
103
|
+
it "should fail when column names are not specified" do
|
|
104
|
+
err = assert_raises ArgumentError do
|
|
105
|
+
Topic.import values, validate: false
|
|
106
|
+
end
|
|
107
|
+
|
|
108
|
+
assert err.message.include? 'Extra keys: [:author_email_address]'
|
|
109
|
+
end
|
|
110
|
+
|
|
111
|
+
it "should succeed when column names are specified" do
|
|
112
|
+
assert_difference "Topic.count", +2 do
|
|
113
|
+
Topic.import columns, values, validate: false
|
|
114
|
+
end
|
|
115
|
+
end
|
|
116
|
+
end
|
|
117
|
+
|
|
118
|
+
context "with missing keys" do
|
|
119
|
+
let(:values) do
|
|
120
|
+
[
|
|
121
|
+
{ title: "LDAP", author_name: "Jerry Carter" },
|
|
122
|
+
{ title: "Rails Recipes" } # author_name is missing
|
|
123
|
+
]
|
|
124
|
+
end
|
|
125
|
+
|
|
126
|
+
it "should fail when column names are not specified" do
|
|
127
|
+
err = assert_raises ArgumentError do
|
|
128
|
+
Topic.import values, validate: false
|
|
129
|
+
end
|
|
130
|
+
|
|
131
|
+
assert err.message.include? 'Missing keys: [:author_name]'
|
|
132
|
+
end
|
|
133
|
+
|
|
134
|
+
it "should fail on missing hash key from specified column names" do
|
|
135
|
+
err = assert_raises ArgumentError do
|
|
136
|
+
Topic.import %i(author_name), values, validate: false
|
|
137
|
+
end
|
|
138
|
+
|
|
139
|
+
assert err.message.include? 'Missing keys: [:author_name]'
|
|
140
|
+
end
|
|
141
|
+
end
|
|
142
|
+
end
|
|
143
|
+
|
|
144
|
+
unless ENV["SKIP_COMPOSITE_PK"]
|
|
145
|
+
describe "with composite primary keys" do
|
|
146
|
+
it "should import models successfully" do
|
|
147
|
+
tags = [Tag.new(tag_id: 1, publisher_id: 1, tag: 'Mystery')]
|
|
148
|
+
|
|
149
|
+
assert_difference "Tag.count", +1 do
|
|
150
|
+
Tag.import tags
|
|
151
|
+
end
|
|
152
|
+
end
|
|
153
|
+
|
|
154
|
+
it "should import array of values successfully" do
|
|
155
|
+
columns = [:tag_id, :publisher_id, :tag]
|
|
156
|
+
values = [[1, 1, 'Mystery'], [2, 1, 'Science']]
|
|
157
|
+
|
|
158
|
+
assert_difference "Tag.count", +2 do
|
|
159
|
+
Tag.import columns, values, validate: false
|
|
160
|
+
end
|
|
161
|
+
end
|
|
162
|
+
end
|
|
163
|
+
end
|
|
164
|
+
|
|
165
|
+
describe "with STI models" do
|
|
166
|
+
it "should import models successfully" do
|
|
167
|
+
dictionaries = [Dictionary.new(author_name: "Noah Webster", title: "Webster's Dictionary")]
|
|
168
|
+
|
|
169
|
+
assert_difference "Dictionary.count", +1 do
|
|
170
|
+
Dictionary.import dictionaries
|
|
171
|
+
end
|
|
172
|
+
assert_equal "Dictionary", Dictionary.first.type
|
|
173
|
+
end
|
|
174
|
+
end
|
|
175
|
+
|
|
176
|
+
context "with :validation option" do
|
|
177
|
+
let(:columns) { %w(title author_name content) }
|
|
178
|
+
let(:valid_values) { [["LDAP", "Jerry Carter", "Putting Directories to Work."], ["Rails Recipes", "Chad Fowler", "A trusted collection of solutions."]] }
|
|
179
|
+
let(:valid_values_with_context) { [[1111, "Jerry Carter", "1111"], [2222, "Chad Fowler", "2222"]] }
|
|
180
|
+
let(:invalid_values) { [["The RSpec Book", "David Chelimsky", "..."], ["Agile+UX", "", "All about Agile in UX."]] }
|
|
181
|
+
let(:valid_models) { valid_values.map { |title, author_name, content| Topic.new(title: title, author_name: author_name, content: content) } }
|
|
182
|
+
let(:invalid_models) { invalid_values.map { |title, author_name, content| Topic.new(title: title, author_name: author_name, content: content) } }
|
|
183
|
+
|
|
184
|
+
context "with validation checks turned off" do
|
|
185
|
+
it "should import valid data" do
|
|
186
|
+
assert_difference "Topic.count", +2 do
|
|
187
|
+
Topic.import columns, valid_values, validate: false
|
|
188
|
+
end
|
|
189
|
+
end
|
|
190
|
+
|
|
191
|
+
it "should import invalid data" do
|
|
192
|
+
assert_difference "Topic.count", +2 do
|
|
193
|
+
Topic.import columns, invalid_values, validate: false
|
|
194
|
+
end
|
|
195
|
+
end
|
|
196
|
+
|
|
197
|
+
it 'should raise a specific error if a column does not exist' do
|
|
198
|
+
assert_raises ActiveRecord::Import::MissingColumnError do
|
|
199
|
+
Topic.import ['foo'], [['bar']], validate: false
|
|
200
|
+
end
|
|
201
|
+
end
|
|
202
|
+
end
|
|
203
|
+
|
|
204
|
+
context "with validation checks turned on" do
|
|
205
|
+
it "should import valid data" do
|
|
206
|
+
assert_difference "Topic.count", +2 do
|
|
207
|
+
Topic.import columns, valid_values, validate: true
|
|
208
|
+
end
|
|
209
|
+
end
|
|
210
|
+
|
|
211
|
+
it "should import valid data with on option" do
|
|
212
|
+
assert_difference "Topic.count", +2 do
|
|
213
|
+
Topic.import columns, valid_values_with_context, validate_with_context: :context_test
|
|
214
|
+
end
|
|
215
|
+
end
|
|
216
|
+
|
|
217
|
+
it "should ignore uniqueness validators" do
|
|
218
|
+
Topic.import columns, valid_values
|
|
219
|
+
assert_difference "Topic.count", +2 do
|
|
220
|
+
Topic.import columns, valid_values
|
|
221
|
+
end
|
|
222
|
+
end
|
|
223
|
+
|
|
224
|
+
it "should not alter the callback chain of the model" do
|
|
225
|
+
attributes = columns.zip(valid_values.first).to_h
|
|
226
|
+
topic = Topic.new attributes
|
|
227
|
+
Topic.import [topic], validate: true
|
|
228
|
+
duplicate_topic = Topic.new attributes
|
|
229
|
+
Topic.import [duplicate_topic], validate: true
|
|
230
|
+
assert duplicate_topic.invalid?
|
|
231
|
+
end
|
|
232
|
+
|
|
233
|
+
it "should not import invalid data" do
|
|
234
|
+
assert_no_difference "Topic.count" do
|
|
235
|
+
Topic.import columns, invalid_values, validate: true
|
|
236
|
+
end
|
|
237
|
+
end
|
|
238
|
+
|
|
239
|
+
it "should import invalid data with on option" do
|
|
240
|
+
assert_no_difference "Topic.count" do
|
|
241
|
+
Topic.import columns, valid_values, validate_with_context: :context_test
|
|
242
|
+
end
|
|
243
|
+
end
|
|
244
|
+
|
|
245
|
+
it "should report the failed instances" do
|
|
246
|
+
results = Topic.import columns, invalid_values, validate: true
|
|
247
|
+
assert_equal invalid_values.size, results.failed_instances.size
|
|
248
|
+
assert_not_equal results.failed_instances.first, results.failed_instances.last
|
|
249
|
+
results.failed_instances.each do |e|
|
|
250
|
+
assert_kind_of Topic, e
|
|
251
|
+
assert_equal e.errors.count, 1
|
|
252
|
+
end
|
|
253
|
+
end
|
|
254
|
+
|
|
255
|
+
it "should set ids in valid models if adapter supports setting primary key of imported objects" do
|
|
256
|
+
if ActiveRecord::Base.supports_setting_primary_key_of_imported_objects?
|
|
257
|
+
Topic.import (invalid_models + valid_models), validate: true
|
|
258
|
+
assert_nil invalid_models[0].id
|
|
259
|
+
assert_nil invalid_models[1].id
|
|
260
|
+
assert_equal valid_models[0].id, Topic.all[0].id
|
|
261
|
+
assert_equal valid_models[1].id, Topic.all[1].id
|
|
262
|
+
end
|
|
263
|
+
end
|
|
264
|
+
|
|
265
|
+
it "should set ActiveRecord timestamps in valid models if adapter supports setting primary key of imported objects" do
|
|
266
|
+
if ActiveRecord::Base.supports_setting_primary_key_of_imported_objects?
|
|
267
|
+
Timecop.freeze(Time.at(0)) do
|
|
268
|
+
Topic.import (invalid_models + valid_models), validate: true
|
|
269
|
+
end
|
|
270
|
+
|
|
271
|
+
assert_nil invalid_models[0].created_at
|
|
272
|
+
assert_nil invalid_models[0].updated_at
|
|
273
|
+
assert_nil invalid_models[1].created_at
|
|
274
|
+
assert_nil invalid_models[1].updated_at
|
|
275
|
+
|
|
276
|
+
assert_equal valid_models[0].created_at, Topic.all[0].created_at
|
|
277
|
+
assert_equal valid_models[0].updated_at, Topic.all[0].updated_at
|
|
278
|
+
assert_equal valid_models[1].created_at, Topic.all[1].created_at
|
|
279
|
+
assert_equal valid_models[1].updated_at, Topic.all[1].updated_at
|
|
280
|
+
end
|
|
281
|
+
end
|
|
282
|
+
|
|
283
|
+
it "should import valid data when mixed with invalid data" do
|
|
284
|
+
assert_difference "Topic.count", +2 do
|
|
285
|
+
Topic.import columns, valid_values + invalid_values, validate: true
|
|
286
|
+
end
|
|
287
|
+
assert_equal 0, Topic.where(title: invalid_values.map(&:first)).count
|
|
288
|
+
end
|
|
289
|
+
|
|
290
|
+
it "should run callbacks" do
|
|
291
|
+
assert_no_difference "Topic.count" do
|
|
292
|
+
Topic.import columns, [["invalid", "Jerry Carter"]], validate: true
|
|
293
|
+
end
|
|
294
|
+
end
|
|
295
|
+
|
|
296
|
+
it "should call validation methods" do
|
|
297
|
+
assert_no_difference "Topic.count" do
|
|
298
|
+
Topic.import columns, [["validate_failed", "Jerry Carter"]], validate: true
|
|
299
|
+
end
|
|
300
|
+
end
|
|
301
|
+
end
|
|
302
|
+
|
|
303
|
+
context "with uniqueness validators included" do
|
|
304
|
+
it "should not import duplicate records" do
|
|
305
|
+
Topic.import columns, valid_values
|
|
306
|
+
assert_no_difference "Topic.count" do
|
|
307
|
+
Topic.import columns, valid_values, validate_uniqueness: true
|
|
308
|
+
end
|
|
309
|
+
end
|
|
310
|
+
end
|
|
311
|
+
|
|
312
|
+
context "when validatoring presence of belongs_to association" do
|
|
313
|
+
it "should not import records without foreign key" do
|
|
314
|
+
assert_no_difference "UserToken.count" do
|
|
315
|
+
UserToken.import [:token], [['12345abcdef67890']]
|
|
316
|
+
end
|
|
317
|
+
end
|
|
318
|
+
|
|
319
|
+
it "should import records with foreign key" do
|
|
320
|
+
assert_difference "UserToken.count", +1 do
|
|
321
|
+
UserToken.import [:user_name, :token], [%w("Bob", "12345abcdef67890")]
|
|
322
|
+
end
|
|
323
|
+
end
|
|
324
|
+
|
|
325
|
+
it "should not mutate the defined validations" do
|
|
326
|
+
UserToken.import [:user_name, :token], [%w("Bob", "12345abcdef67890")]
|
|
327
|
+
ut = UserToken.new
|
|
328
|
+
ut.valid?
|
|
329
|
+
assert_includes ut.errors.messages, :user
|
|
330
|
+
end
|
|
331
|
+
end
|
|
332
|
+
end
|
|
333
|
+
|
|
334
|
+
context "without :validation option" do
|
|
335
|
+
let(:columns) { %w(title author_name) }
|
|
336
|
+
let(:invalid_values) { [["The RSpec Book", ""], ["Agile+UX", ""]] }
|
|
337
|
+
|
|
338
|
+
it "should not import invalid data" do
|
|
339
|
+
assert_no_difference "Topic.count" do
|
|
340
|
+
result = Topic.import columns, invalid_values
|
|
341
|
+
assert_equal 2, result.failed_instances.size
|
|
342
|
+
end
|
|
343
|
+
end
|
|
344
|
+
end
|
|
345
|
+
|
|
346
|
+
context "with :all_or_none option" do
|
|
347
|
+
let(:columns) { %w(title author_name) }
|
|
348
|
+
let(:valid_values) { [["LDAP", "Jerry Carter"], ["Rails Recipes", "Chad Fowler"]] }
|
|
349
|
+
let(:invalid_values) { [["The RSpec Book", ""], ["Agile+UX", ""]] }
|
|
350
|
+
let(:mixed_values) { valid_values + invalid_values }
|
|
351
|
+
|
|
352
|
+
context "with validation checks turned on" do
|
|
353
|
+
it "should import valid data" do
|
|
354
|
+
assert_difference "Topic.count", +2 do
|
|
355
|
+
Topic.import columns, valid_values, all_or_none: true
|
|
356
|
+
end
|
|
357
|
+
end
|
|
358
|
+
|
|
359
|
+
it "should not import invalid data" do
|
|
360
|
+
assert_no_difference "Topic.count" do
|
|
361
|
+
Topic.import columns, invalid_values, all_or_none: true
|
|
362
|
+
end
|
|
363
|
+
end
|
|
364
|
+
|
|
365
|
+
it "should not import valid data when mixed with invalid data" do
|
|
366
|
+
assert_no_difference "Topic.count" do
|
|
367
|
+
Topic.import columns, mixed_values, all_or_none: true
|
|
368
|
+
end
|
|
369
|
+
end
|
|
370
|
+
|
|
371
|
+
it "should report the failed instances" do
|
|
372
|
+
results = Topic.import columns, mixed_values, all_or_none: true
|
|
373
|
+
assert_equal invalid_values.size, results.failed_instances.size
|
|
374
|
+
results.failed_instances.each { |e| assert_kind_of Topic, e }
|
|
375
|
+
end
|
|
376
|
+
|
|
377
|
+
it "should report the zero inserts" do
|
|
378
|
+
results = Topic.import columns, mixed_values, all_or_none: true
|
|
379
|
+
assert_equal 0, results.num_inserts
|
|
380
|
+
end
|
|
381
|
+
end
|
|
382
|
+
end
|
|
383
|
+
|
|
384
|
+
context "with :batch_size option" do
|
|
385
|
+
it "should import with a single insert" do
|
|
386
|
+
assert_difference "Topic.count", +10 do
|
|
387
|
+
result = Topic.import Build(10, :topics), batch_size: 10
|
|
388
|
+
assert_equal 1, result.num_inserts if Topic.supports_import?
|
|
389
|
+
end
|
|
390
|
+
end
|
|
391
|
+
|
|
392
|
+
it "should import with multiple inserts" do
|
|
393
|
+
assert_difference "Topic.count", +10 do
|
|
394
|
+
result = Topic.import Build(10, :topics), batch_size: 4
|
|
395
|
+
assert_equal 3, result.num_inserts if Topic.supports_import?
|
|
396
|
+
end
|
|
397
|
+
end
|
|
398
|
+
end
|
|
399
|
+
|
|
400
|
+
context "with :synchronize option" do
  context "synchronizing on new records" do
    let(:new_topics) { Build(3, :topics) }

    # Without explicit synchronize_keys, in-memory new records can only be
    # reloaded on adapters that set primary keys of imported objects.
    it "doesn't reload any data (doesn't work)" do
      Topic.import new_topics, synchronize: new_topics
      if Topic.supports_setting_primary_key_of_imported_objects?
        assert new_topics.all?(&:persisted?), "Records should have been reloaded"
      else
        assert new_topics.all?(&:new_record?), "No record should have been reloaded"
      end
    end
  end

  context "synchronizing on new records with explicit conditions" do
    let(:new_topics) { Build(3, :topics) }

    it "reloads data for existing in-memory instances" do
      # Matching on :title lets import find and refresh the in-memory objects.
      Topic.import(new_topics, synchronize: new_topics, synchronize_keys: [:title] )
      assert new_topics.all?(&:persisted?), "Records should have been reloaded"
    end
  end

  context "synchronizing on destroyed records with explicit conditions" do
    # Generate persists the records (unlike Build) so they can be destroyed first.
    let(:new_topics) { Generate(3, :topics) }

    it "reloads data for existing in-memory instances" do
      new_topics.each(&:destroy)
      Topic.import(new_topics, synchronize: new_topics, synchronize_keys: [:title] )
      assert new_topics.all?(&:persisted?), "Records should have been reloaded"
    end
  end
end
|
|
433
|
+
|
|
434
|
+
context "with an array of unsaved model instances" do
  let(:topic) { Build(:topic, title: "The RSpec Book", author_name: "David Chelimsky") }
  let(:topics) { Build(9, :topics) }
  let(:invalid_topics) { Build(7, :invalid_topics) }

  it "should import records based on those model's attributes" do
    assert_difference "Topic.count", +9 do
      Topic.import topics
    end

    Topic.import [topic]
    assert Topic.where(title: "The RSpec Book", author_name: "David Chelimsky").first
  end

  it "should not overwrite existing records" do
    topic = Generate(:topic, title: "foobar")
    assert_no_difference "Topic.count" do
      begin
        Topic.transaction do
          topic.title = "baz"
          Topic.import [topic]
        end
      # NOTE(review): deliberately broad rescue — kept as-is; see rationale below.
      rescue Exception
        # PostgreSQL raises PgError due to key constraints
        # I don't know why ActiveRecord doesn't catch these. *sigh*
      end
    end
    # The persisted row must still have its original title.
    assert_equal "foobar", topic.reload.title
  end

  context "with validation checks turned on" do
    it "should import valid models" do
      assert_difference "Topic.count", +9 do
        Topic.import topics, validate: true
      end
    end

    it "should not import invalid models" do
      assert_no_difference "Topic.count" do
        Topic.import invalid_topics, validate: true
      end
    end
  end

  context "with validation checks turned off" do
    it "should import invalid models" do
      # validate: false bypasses AR validations, so invalid rows still insert.
      assert_difference "Topic.count", +7 do
        Topic.import invalid_topics, validate: false
      end
    end
  end
end
|
|
486
|
+
|
|
487
|
+
context "with an array of columns and an array of unsaved model instances" do
  let(:topics) { Build(2, :topics) }

  it "should import records populating the supplied columns with the corresponding model instance attributes" do
    assert_difference "Topic.count", +2 do
      Topic.import [:author_name, :title], topics
    end

    # imported topics should be findable by their imported attributes
    assert Topic.where(author_name: topics.first.author_name).first
    assert Topic.where(author_name: topics.last.author_name).first
  end

  it "should not populate fields for columns not imported" do
    topics.first.author_email_address = "zach.dennis@gmail.com"
    assert_difference "Topic.count", +2 do
      # Only :author_name and :title are listed, so the email must be dropped.
      Topic.import [:author_name, :title], topics
    end

    assert !Topic.where(author_email_address: "zach.dennis@gmail.com").first
  end
end
|
|
509
|
+
|
|
510
|
+
context "with an array of columns and an array of values" do
  it "should import ids when specified" do
    # Explicit primary-key values are honored rather than auto-generated.
    Topic.import [:id, :author_name, :title], [[99, "Bob Jones", "Topic 99"]]
    assert_equal 99, Topic.last.id
  end

  it "ignores the recursive option" do
    # recursive: true only applies to model-instance imports, not raw values.
    assert_difference "Topic.count", +1 do
      Topic.import [:author_name, :title], [["David Chelimsky", "The RSpec Book"]], recursive: true
    end
  end
end
|
|
522
|
+
|
|
523
|
+
context "ActiveRecord timestamps" do
  let(:time) { Chronic.parse("5 minutes ago") }

  context "when the timestamps columns are present" do
    setup do
      # Pre-create a book so the second imported row carries explicit
      # (2-year-old) timestamps that the import must preserve.
      @existing_book = Book.create(title: "Fell", author_name: "Curry", publisher: "Bayer", created_at: 2.years.ago.utc, created_on: 2.years.ago.utc, updated_at: 2.years.ago.utc, updated_on: 2.years.ago.utc)
      # NOTE(review): mutates a global AR setting without restoring it —
      # later tests presumably also expect :utc.
      ActiveRecord::Base.default_timezone = :utc
      Timecop.freeze(time) do
        assert_difference "Book.count", +2 do
          # First row has nil timestamps (should be auto-filled with `time`);
          # second row copies the existing book's explicit timestamps.
          Book.import %w(title author_name publisher created_at created_on updated_at updated_on), [["LDAP", "Big Bird", "Del Rey", nil, nil, nil, nil], [@existing_book.title, @existing_book.author_name, @existing_book.publisher, @existing_book.created_at, @existing_book.created_on, @existing_book.updated_at, @existing_book.updated_on]]
        end
      end
      @new_book, @existing_book = Book.last 2
    end

    it "should set the created_at column for new records" do
      assert_in_delta time.to_i, @new_book.created_at.to_i, 1.second
    end

    it "should set the created_on column for new records" do
      assert_in_delta time.to_i, @new_book.created_on.to_i, 1.second
    end

    it "should not set the created_at column for existing records" do
      assert_equal 2.years.ago.utc.strftime("%Y:%d"), @existing_book.created_at.strftime("%Y:%d")
    end

    it "should not set the created_on column for existing records" do
      assert_equal 2.years.ago.utc.strftime("%Y:%d"), @existing_book.created_on.strftime("%Y:%d")
    end

    it "should set the updated_at column for new records" do
      assert_in_delta time.to_i, @new_book.updated_at.to_i, 1.second
    end

    it "should set the updated_on column for new records" do
      assert_in_delta time.to_i, @new_book.updated_on.to_i, 1.second
    end

    it "should not set the updated_at column for existing records" do
      assert_equal 2.years.ago.utc.strftime("%Y:%d"), @existing_book.updated_at.strftime("%Y:%d")
    end

    it "should not set the updated_on column for existing records" do
      assert_equal 2.years.ago.utc.strftime("%Y:%d"), @existing_book.updated_on.strftime("%Y:%d")
    end

    it "should not set the updated_at column on models if changed" do
      timestamp = Time.now.utc
      books = [
        Book.new(author_name: "Foo", title: "Baz", created_at: timestamp, updated_at: timestamp)
      ]
      Book.import books
      assert_equal timestamp.strftime("%Y:%d"), Book.last.updated_at.strftime("%Y:%d")
    end
  end

  context "when a custom time zone is set" do
    setup do
      Timecop.freeze(time) do
        assert_difference "Book.count", +1 do
          Book.import [:title, :author_name, :publisher], [["LDAP", "Big Bird", "Del Rey"]]
        end
      end
      @book = Book.last
    end

    it "should set the created_at and created_on timestamps for new records" do
      assert_in_delta time.to_i, @book.created_at.to_i, 1.second
      assert_in_delta time.to_i, @book.created_on.to_i, 1.second
    end

    it "should set the updated_at and updated_on timestamps for new records" do
      assert_in_delta time.to_i, @book.updated_at.to_i, 1.second
      assert_in_delta time.to_i, @book.updated_on.to_i, 1.second
    end
  end
end
|
|
601
|
+
|
|
602
|
+
context "importing with database reserved words" do
  # "order" is an SQL reserved word; the adapter must quote it correctly.
  let(:group) { Build(:group, order: "superx") }

  it "should import just fine" do
    records = [group]
    assert_difference "Group.count", +1 do
      Group.import records
    end
    assert_equal "superx", Group.first.order
  end
end
|
|
612
|
+
|
|
613
|
+
context "importing a datetime field" do
  it "should import a date with YYYY/MM/DD format just fine" do
    # Slash-delimited date strings must be parsed into the datetime column.
    columns = [:author_name, :title, :last_read]
    rows = [["Bob Jones", "Topic 2", "2010/05/14"]]
    Topic.import columns, rows
    assert_equal "2010/05/14".to_date, Topic.last.last_read.to_date
  end
end
|
|
619
|
+
|
|
620
|
+
context "importing through an association scope" do
  # Dynamically generates the same three import tests for a has_many
  # association (chapters) and a polymorphic one (discounts).
  { has_many: :chapters, polymorphic: :discounts }.each do |association_type, association|
    # NOTE(review): these run at file-load time (not inside setup), so the
    # book/scope are shared by all examples generated for this association.
    book = FactoryBot.create :book
    scope = book.public_send association
    klass = { chapters: Chapter, discounts: Discount }[association]
    column = { chapters: :title, discounts: :amount }[association]
    val1 = { chapters: 'A', discounts: 5 }[association]
    val2 = { chapters: 'B', discounts: 6 }[association]

    context "for #{association_type}" do
      it "works importing models" do
        scope.import [
          klass.new(column => val1),
          klass.new(column => val2)
        ]

        assert_equal [val1, val2], scope.map(&column).sort
      end

      it "works importing array of columns and values" do
        scope.import [column], [[val1], [val2]]

        assert_equal [val1, val2], scope.map(&column).sort
      end

      it "works importing array of hashes" do
        scope.import [{ column => val1 }, { column => val2 }]

        assert_equal [val1, val2], scope.map(&column).sort
      end
    end

    # The association foreign key here is the user's name, not its id.
    it "works with a non-standard association primary key" do
      user = User.create(id: 1, name: 'Solomon')
      user.user_tokens.import [:id, :token], [[5, '12345abcdef67890']]

      token = UserToken.find(5)
      assert_equal 'Solomon', token.user_name
    end
  end
end
|
|
661
|
+
|
|
662
|
+
context "importing model with polymorphic belongs_to" do
  it "works without error" do
    # A discount whose polymorphic target is a freshly created book.
    discounted_book = FactoryBot.create :book
    Discount.import([Discount.new(discountable: discounted_book)])

    assert_equal 1, Discount.count
  end
end
|
|
672
|
+
|
|
673
|
+
context 'When importing models with Enum fields' do
  it 'should be able to import enum fields' do
    Book.delete_all if Book.count > 0
    books = [
      Book.new(author_name: "Foo", title: "Baz", status: 0),
      Book.new(author_name: "Foo2", title: "Baz2", status: 1),
    ]
    Book.import books
    assert_equal 2, Book.count

    # AR >= 5 stores enum values as their string names at the attribute
    # level; older versions store the raw integer.
    # NOTE(review): `.to_i >= 5.0` mixes Integer and Float — works, but
    # `.to_f` (as used below for 4.1) would be more consistent.
    if ENV['AR_VERSION'].to_i >= 5.0
      assert_equal 'draft', Book.first.read_attribute('status')
      assert_equal 'published', Book.last.read_attribute('status')
    else
      assert_equal 0, Book.first.read_attribute('status')
      assert_equal 1, Book.last.read_attribute('status')
    end
  end

  it 'should be able to import enum fields with default value' do
    Book.delete_all if Book.count > 0
    books = [
      Book.new(author_name: "Foo", title: "Baz")
    ]
    Book.import books
    assert_equal 1, Book.count

    # No status supplied, so the enum's default (:draft / 0) should apply.
    if ENV['AR_VERSION'].to_i >= 5.0
      assert_equal 'draft', Book.first.read_attribute('status')
    else
      assert_equal 0, Book.first.read_attribute('status')
    end
  end

  # Symbol enum names are only supported on AR > 4.1.
  if ENV['AR_VERSION'].to_f > 4.1
    it 'should be able to import enum fields by name' do
      Book.delete_all if Book.count > 0
      books = [
        Book.new(author_name: "Foo", title: "Baz", status: :draft),
        Book.new(author_name: "Foo2", title: "Baz2", status: :published),
      ]
      Book.import books
      assert_equal 2, Book.count

      if ENV['AR_VERSION'].to_i >= 5.0
        assert_equal 'draft', Book.first.read_attribute('status')
        assert_equal 'published', Book.last.read_attribute('status')
      else
        assert_equal 0, Book.first.read_attribute('status')
        assert_equal 1, Book.last.read_attribute('status')
      end
    end
  end
end
|
|
727
|
+
|
|
728
|
+
context 'When importing arrays of values with Enum fields' do
  let(:columns) { [:author_name, :title, :status] }
  let(:values) { [['Author #1', 'Book #1', 0], ['Author #2', 'Book #2', 1]] }

  it 'should be able to import enum fields' do
    Book.delete_all if Book.count > 0
    Book.import columns, values
    assert_equal 2, Book.count

    # AR >= 5 reads enum attributes back as their string names; older
    # versions return the stored integer.
    if ENV['AR_VERSION'].to_i >= 5.0
      assert_equal 'draft', Book.first.read_attribute('status')
      assert_equal 'published', Book.last.read_attribute('status')
    else
      assert_equal 0, Book.first.read_attribute('status')
      assert_equal 1, Book.last.read_attribute('status')
    end
  end
end
|
|
746
|
+
|
|
747
|
+
context 'importing arrays of values with boolean fields' do
  let(:columns) { [:author_name, :title, :for_sale] }

  it 'should be able to coerce integers as boolean fields' do
    Book.delete_all if Book.count > 0
    # 0/1 must be type-cast to false/true by the column's boolean type.
    values = [['Author #1', 'Book #1', 0], ['Author #2', 'Book #2', 1]]
    assert_difference "Book.count", +2 do
      Book.import columns, values
    end
    assert_equal false, Book.first.for_sale
    assert_equal true, Book.last.for_sale
  end

  it 'should be able to coerce strings as boolean fields' do
    Book.delete_all if Book.count > 0
    # 'false'/'true' strings must likewise be type-cast.
    values = [['Author #1', 'Book #1', 'false'], ['Author #2', 'Book #2', 'true']]
    assert_difference "Book.count", +2 do
      Book.import columns, values
    end
    assert_equal false, Book.first.for_sale
    assert_equal true, Book.last.for_sale
  end
end
|
|
770
|
+
|
|
771
|
+
describe "importing when model has default_scope" do
  it "doesn't import the default scope values" do
    assert_difference "Widget.unscoped.count", +2 do
      Widget.import [:w_id], [[1], [2]]
    end
    default_scope_value = Widget.scope_attributes[:active]
    # BUG FIX: the original compared the scope value against the Widget
    # *record* returned by find_by_w_id, which can never equal a boolean,
    # so the assertions passed vacuously. Compare the :active attribute.
    assert_not_equal default_scope_value, Widget.unscoped.find_by_w_id(1).active
    assert_not_equal default_scope_value, Widget.unscoped.find_by_w_id(2).active
  end

  it "imports columns that are a part of the default scope using the value specified" do
    assert_difference "Widget.unscoped.count", +2 do
      Widget.import [:w_id, :active], [[1, true], [2, false]]
    end
    # BUG FIX: same vacuous record-vs-boolean comparison as above; the test's
    # stated intent is that the explicitly supplied :active values are kept.
    assert_equal true, Widget.unscoped.find_by_w_id(1).active
    assert_equal false, Widget.unscoped.find_by_w_id(2).active
  end
end
|
|
789
|
+
|
|
790
|
+
describe "importing serialized fields" do
  it "imports values for serialized Hash fields" do
    assert_difference "Widget.unscoped.count", +1 do
      Widget.import [:w_id, :data], [[1, { a: :b }]]
    end
    assert_equal({ a: :b }, Widget.find_by_w_id(1).data)
  end

  it "imports values for serialized fields" do
    assert_difference "Widget.unscoped.count", +1 do
      Widget.import [:w_id, :unspecified_data], [[1, { a: :b }]]
    end
    assert_equal({ a: :b }, Widget.find_by_w_id(1).unspecified_data)
  end

  it "imports values for custom coder" do
    assert_difference "Widget.unscoped.count", +1 do
      Widget.import [:w_id, :custom_data], [[1, { a: :b }]]
    end
    assert_equal({ a: :b }, Widget.find_by_w_id(1).custom_data)
  end

  let(:data) { { a: :b } }
  it "imports values for serialized JSON fields" do
    assert_difference "Widget.unscoped.count", +1 do
      Widget.import [:w_id, :json_data], [[9, data]]
    end
    # JSON round-trips symbols to strings, hence the as_json comparison.
    assert_equal(data.as_json, Widget.find_by_w_id(9).json_data)
  end

  it "imports serialized values from saved records" do
    Widget.import [:w_id, :json_data], [[1, data]]
    assert_equal data.as_json, Widget.last.json_data

    # Re-importing an already-persisted record must re-serialize correctly.
    w = Widget.last
    w.w_id = 2
    Widget.import([w])
    assert_equal data.as_json, Widget.last.json_data
  end

  context "with a store" do
    it "imports serialized attributes set using accessors" do
      # `color` is an ActiveRecord::Store accessor backed by a serialized column.
      vendors = [Vendor.new(name: 'Vendor 1', color: 'blue')]
      assert_difference "Vendor.count", +1 do
        Vendor.import vendors
      end
      assert_equal('blue', Vendor.first.color)
    end
  end
end
|
|
840
|
+
|
|
841
|
+
# .import! is the raising variant of .import: invalid data raises
# ActiveRecord::RecordInvalid instead of being collected in failed_instances.
describe "#import!" do
  context "with an array of unsaved model instances" do
    let(:topics) { Build(2, :topics) }
    let(:invalid_topics) { Build(2, :invalid_topics) }

    context "with invalid data" do
      it "should raise ActiveRecord::RecordInvalid" do
        assert_no_difference "Topic.count" do
          assert_raise ActiveRecord::RecordInvalid do
            Topic.import! invalid_topics
          end
        end
      end
    end

    context "with valid data" do
      it "should import data" do
        assert_difference "Topic.count", +2 do
          Topic.import! topics
        end
      end
    end
  end

  context "with array of columns and array of values" do
    let(:columns) { %w(title author_name) }
    let(:valid_values) { [["LDAP", "Jerry Carter"], ["Rails Recipes", "Chad Fowler"]] }
    let(:invalid_values) { [["Rails Recipes", "Chad Fowler"], ["The RSpec Book", ""], ["Agile+UX", ""]] }

    context "with invalid data" do
      it "should raise ActiveRecord::RecordInvalid" do
        assert_no_difference "Topic.count" do
          assert_raise ActiveRecord::RecordInvalid do
            Topic.import! columns, invalid_values
          end
        end
      end
    end

    context "with valid data" do
      it "should import data" do
        assert_difference "Topic.count", +2 do
          Topic.import! columns, valid_values
        end
      end
    end
  end

  context "with objects that respond to .to_sql as values" do
    let(:columns) { %w(title author_name) }
    # Relations (which respond to .to_sql) are embedded as subqueries.
    let(:valid_values) { [["LDAP", Book.select("'Jerry Carter'").limit(1)], ["Rails Recipes", Book.select("'Chad Fowler'").limit(1)]] }

    it "should import data" do
      assert_difference "Topic.count", +2 do
        Topic.import! columns, valid_values
        topics = Topic.all
        assert_equal "Jerry Carter", topics.first.author_name
        assert_equal "Chad Fowler", topics.last.author_name
      end
    end
  end
end
|
|
903
|
+
end
|