activerecord-import 0.19.1 → 0.20.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 2ccaefe9611ff80d350abf4beca0e741a23c0167
- data.tar.gz: 374da8395b259808c29d102df17bde738046d428
+ metadata.gz: b4613a9ba2a76e71320a6e8107b9b557f2413212
+ data.tar.gz: 2fb7c7b7a5f801cd591e5cea93b088f0f6d52499
  SHA512:
- metadata.gz: 527652ab74bcf4a44014ba016b846f403e0d94a1b57450d36ea35a65702fcd15a9c273ab82de42278c0fc1c7dd84baee16425d0afab17d425ba36cebbb500d56
- data.tar.gz: 82f9e31dfe9c002eddedb59e34b4a6f3d3763504f5932d8fbd15a8c39630ae385a5fdbe6006d5aaef5841a6d158eedc19f8158b090f9e1fd7fd43ef219d6aa8d
+ metadata.gz: 72af6c0e217ef61e3bb87b1d078193d979f7d42e34dc5c721d5a26e72c50f1ce2b5a4a68808a2e4514b4a48135017c96cfef29e0dff4283fbc801d87b5039e18
+ data.tar.gz: 8b22f10f28ac4e0d7f7d66c4422abb6c4d7ff616a108ad5192cd4ba257e464c15b0dcb1f802c97a4f27bce1e6b57edaa48903d9ccfe68e75cd6c89ec0fa1229e
@@ -1,3 +1,18 @@
+ ## Changes in 0.20.0
+
+ ### New Features
+
+ * Allow returning columns to be specified for PostgreSQL. Thanks to
+ @tjwp via \#433.
+
+ ### Fixes
+
+ * Fixes an issue when bypassing uniqueness validators. Thanks to @vmaxv via \#444.
+ * For AR < 4.2, prevent type casting for binary columns on Postgresql. Thanks to @mwalsher via \#446.
+ * Fix issue logging class name on import. Thanks to @sophylee, @jkowens via \#447.
+ * Copy belongs_to association id to foreign key column before importing. Thanks to @jkowens via \#448.
+ * Reset model instance on validate. Thanks to @vmaxv via \#449.
+
  ## Changes in 0.19.1
 
  ### Fixes
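The `returning` option is the headline change in 0.20.0. As a quick orientation before the diff below, here is a hedged usage sketch based on the `Book` model exercised in the new PostgreSQL specs further down (column names are illustrative, and PostgreSQL is assumed since only that adapter builds a `RETURNING` clause):

```ruby
# Sketch only: assumes a Book model with author_name and title columns on PostgreSQL.
books  = [Book.new(author_name: "King", title: "It")]
result = Book.import(books, returning: %w(author_name title))

result.num_inserts # => number of INSERT statements issued
result.ids         # => primary keys of the imported rows
result.results     # => [["King", "It"]] - the requested columns, row by row
```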
data/Gemfile CHANGED
@@ -3,7 +3,7 @@ source 'https://rubygems.org'
  gemspec
 
  group :development, :test do
- gem 'rubocop', '~> 0.38.0'
+ gem 'rubocop', '~> 0.40.0'
  gem 'rake'
  end
 
@@ -42,7 +42,8 @@ module BenchmarkOptionParser
  table_types: {},
  delete_on_finish: true,
  number_of_objects: [],
- outputs: [] )
+ outputs: []
+ )
 
  opt_parser = OptionParser.new do |opts|
  opts.banner = BANNER
@@ -16,7 +16,7 @@ module ActiveRecord::Import::AbstractAdapter
  sql2insert = base_sql + values.join( ',' ) + post_sql
  insert( sql2insert, *args )
 
- [number_of_inserts, []]
+ ActiveRecord::Import::Result.new([], number_of_inserts, [], [])
  end
 
  def pre_sql_statements(options)
@@ -49,7 +49,7 @@ module ActiveRecord::Import::MysqlAdapter
  end
  end
 
- [number_of_inserts, []]
+ ActiveRecord::Import::Result.new([], number_of_inserts, [], [])
  end
 
  # Returns the maximum number of bytes that the server will allow
@@ -5,9 +5,10 @@ module ActiveRecord::Import::PostgreSQLAdapter
  MIN_VERSION_FOR_UPSERT = 90_500
 
  def insert_many( sql, values, options = {}, *args ) # :nodoc:
- primary_key = options[:primary_key]
  number_of_inserts = 1
+ returned_values = []
  ids = []
+ results = []
 
  base_sql, post_sql = if sql.is_a?( String )
  [sql, '']
@@ -17,11 +18,12 @@ module ActiveRecord::Import::PostgreSQLAdapter
 
  sql2insert = base_sql + values.join( ',' ) + post_sql
 
- if primary_key.blank? || options[:no_returning]
+ columns = returning_columns(options)
+ if columns.blank? || options[:no_returning]
  insert( sql2insert, *args )
  else
- ids = if primary_key.is_a?( Array )
- # Select composite primary keys
+ returned_values = if columns.size > 1
+ # Select composite columns
  select_rows( sql2insert, *args )
  else
  select_values( sql2insert, *args )
@@ -29,7 +31,34 @@ module ActiveRecord::Import::PostgreSQLAdapter
  query_cache.clear if query_cache_enabled
  end
 
- [number_of_inserts, ids]
+ if options[:returning].blank?
+ ids = returned_values
+ elsif options[:primary_key].blank?
+ results = returned_values
+ else
+ # split primary key and returning columns
+ ids, results = split_ids_and_results(returned_values, columns, options)
+ end
+
+ ActiveRecord::Import::Result.new([], number_of_inserts, ids, results)
+ end
+
+ def split_ids_and_results(values, columns, options)
+ ids = []
+ results = []
+ id_indexes = Array(options[:primary_key]).map { |key| columns.index(key) }
+ returning_indexes = Array(options[:returning]).map { |key| columns.index(key) }
+
+ values.each do |value|
+ value_array = Array(value)
+ ids << id_indexes.map { |i| value_array[i] }
+ results << returning_indexes.map { |i| value_array[i] }
+ end
+
+ ids.map!(&:first) if id_indexes.size == 1
+ results.map!(&:first) if returning_indexes.size == 1
+
+ [ids, results]
  end
 
  def next_value_for_sequence(sequence_name)
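To make the new splitting logic concrete, a hedged walk-through of what `split_ids_and_results` produces; the values are made up for illustration:

```ruby
# Illustration only (made-up values). With the RETURNING clause built from
# columns = ["id", "title"], primary_key: "id" and returning: ["title"]:
values = [[1, "It"], [2, "Misery"]]
# id_indexes        => [0]
# returning_indexes => [1]
# so the split yields:
ids     = [1, 2]            # single primary key column, flattened by map!(&:first)
results = ["It", "Misery"]  # single returning column, flattened the same way
```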
@@ -50,14 +79,21 @@ module ActiveRecord::Import::PostgreSQLAdapter
 
  sql += super(table_name, options)
 
- unless options[:primary_key].blank? || options[:no_returning]
- primary_key = Array(options[:primary_key])
- sql << " RETURNING \"#{primary_key.join('", "')}\""
+ columns = returning_columns(options)
+ unless columns.blank? || options[:no_returning]
+ sql << " RETURNING \"#{columns.join('", "')}\""
  end
 
  sql
  end
 
+ def returning_columns(options)
+ columns = []
+ columns += Array(options[:primary_key]) if options[:primary_key].present?
+ columns |= Array(options[:returning]) if options[:returning].present?
+ columns
+ end
+
  # Add a column to be updated on duplicate key update
  def add_column_for_on_duplicate_key_update( column, options = {} ) # :nodoc:
  arg = options[:on_duplicate_key_update]
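`returning_columns` simply merges the primary key with any user-requested columns, deduplicating via the array union. A small sketch of the resulting behavior (argument values are illustrative; `present?` comes from ActiveSupport, which is loaded alongside ActiveRecord):

```ruby
# Standalone restatement of the helper above, for illustration only.
require "active_support/core_ext/object/blank"

def returning_columns(options)
  columns = []
  columns += Array(options[:primary_key]) if options[:primary_key].present?
  columns |= Array(options[:returning])   if options[:returning].present?
  columns
end

returning_columns(primary_key: "id", returning: %w(id title))
# => ["id", "title"]  - the union keeps "id" only once
returning_columns(primary_key: "id")
# => ["id"]           - plain imports still RETURN just the primary key
```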
@@ -37,7 +37,7 @@ module ActiveRecord::Import::SQLite3Adapter
  end
  end
 
- [number_of_inserts, []]
+ ActiveRecord::Import::Result.new([], number_of_inserts, [], [])
  end
 
  def pre_sql_statements( options)
@@ -3,7 +3,7 @@ require "ostruct"
  module ActiveRecord::Import::ConnectionAdapters; end
 
  module ActiveRecord::Import #:nodoc:
- Result = Struct.new(:failed_instances, :num_inserts, :ids)
+ Result = Struct.new(:failed_instances, :num_inserts, :ids, :results)
 
  module ImportSupport #:nodoc:
  def supports_import? #:nodoc:
@@ -38,7 +38,7 @@ module ActiveRecord::Import #:nodoc:
  model.errors.clear
 
  validate_callbacks = model._validate_callbacks.dup
- validate_callbacks.each do |callback|
+ model._validate_callbacks.each do |callback|
  validate_callbacks.delete(callback) if callback.raw_filter.is_a? ActiveRecord::Validations::UniquenessValidator
  end
 
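This hunk is the uniqueness-validator fix (\#444): the old code deleted callbacks from the same collection it was iterating, which skips elements in Ruby, so iteration now walks the untouched original while deletions happen on the dup. A minimal illustration of that failure mode (not gem code):

```ruby
# Deleting from an array while iterating it skips neighbours,
# so some UniquenessValidator callbacks were never removed.
callbacks = [1, 2, 3, 4]
callbacks.each { |c| callbacks.delete(c) }
callbacks # => [2, 4] - half the entries were never visited
```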
@@ -214,18 +214,18 @@ class ActiveRecord::Base
  #
  # == Options
  # * +validate+ - true|false, tells import whether or not to use
- # ActiveRecord validations. Validations are enforced by default.
+ # ActiveRecord validations. Validations are enforced by default.
  # * +ignore+ - true|false, an alias for on_duplicate_key_ignore.
  # * +on_duplicate_key_ignore+ - true|false, tells import to discard
- # records that contain duplicate keys. For Postgres 9.5+ it adds
- # ON CONFLICT DO NOTHING, for MySQL it uses INSERT IGNORE, and for
- # SQLite it uses INSERT OR IGNORE. Cannot be enabled on a
- # recursive import. For database adapters that normally support
- # setting primary keys on imported objects, this option prevents
- # that from occurring.
+ # records that contain duplicate keys. For Postgres 9.5+ it adds
+ # ON CONFLICT DO NOTHING, for MySQL it uses INSERT IGNORE, and for
+ # SQLite it uses INSERT OR IGNORE. Cannot be enabled on a
+ # recursive import. For database adapters that normally support
+ # setting primary keys on imported objects, this option prevents
+ # that from occurring.
  # * +on_duplicate_key_update+ - an Array or Hash, tells import to
- # use MySQL's ON DUPLICATE KEY UPDATE or Postgres 9.5+ ON CONFLICT
- # DO UPDATE ability. See On Duplicate Key Update below.
+ # use MySQL's ON DUPLICATE KEY UPDATE or Postgres 9.5+ ON CONFLICT
+ # DO UPDATE ability. See On Duplicate Key Update below.
  # * +synchronize+ - an array of ActiveRecord instances for the model
  # that you are currently importing data into. This synchronizes
  # existing model instances in memory with updates from the import.
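For readers skimming the re-indented option docs above, a hedged usage sketch of the two duplicate-key options, using the `BlogPost`, `columns`, and `values` placeholders the RDoc itself uses:

```ruby
# Skip rows that would violate a unique constraint
# (ON CONFLICT DO NOTHING / INSERT IGNORE / INSERT OR IGNORE, per adapter).
BlogPost.import columns, values, on_duplicate_key_ignore: true

# Update selected columns when a duplicate key is hit instead of skipping.
BlogPost.import columns, values, on_duplicate_key_update: [:date_modified]
```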
@@ -323,7 +323,7 @@ class ActiveRecord::Base
  # conflicting constraint to be explicitly specified. Using this option
  # allows you to specify a constraint other than the primary key.
  #
- # ====== :conflict_target
+ # ===== :conflict_target
  #
  # The :conflict_target attribute specifies the columns that make up the
  # conflicting unique constraint and can be a single column or an array of
@@ -333,7 +333,7 @@ class ActiveRecord::Base
  #
  # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: [ :author_id, :slug ], columns: [ :date_modified ] }
  #
- # ====== :index_predicate
+ # ===== :index_predicate
  #
  # The :index_predicate attribute optionally specifies a WHERE condition
  # on :conflict_target, which is required for matching against partial
@@ -342,7 +342,7 @@ class ActiveRecord::Base
  #
  # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: [ :author_id, :slug ], index_predicate: 'status <> 0', columns: [ :date_modified ] }
  #
- # ====== :constraint_name
+ # ===== :constraint_name
  #
  # The :constraint_name attribute explicitly identifies the conflicting
  # unique index by name. Postgres documentation discourages using this method
@@ -350,7 +350,7 @@ class ActiveRecord::Base
  #
  # BlogPost.import columns, values, on_duplicate_key_update: { constraint_name: :blog_posts_pkey, columns: [ :date_modified ] }
  #
- # ====== :condition
+ # ===== :condition
  #
  # The :condition attribute optionally specifies a WHERE condition
  # on :conflict_action. Only rows for which this expression returns true will be updated.
@@ -358,12 +358,12 @@ class ActiveRecord::Base
  # Below is an example:
  #
  # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: [ :author_id ], condition: "blog_posts.title NOT LIKE '%sample%'", columns: [ :author_name ] }
-
- # ====== :columns
+ #
+ # ===== :columns
  #
  # The :columns attribute can be either an Array or a Hash.
  #
- # ======== Using an Array
+ # ===== Using an Array
  #
  # The :columns attribute can be an array of column names. The column names
  # are the only fields that are updated if a duplicate record is found.
@@ -371,7 +371,7 @@ class ActiveRecord::Base
  #
  # BlogPost.import columns, values, on_duplicate_key_update: { conflict_target: :slug, columns: [ :date_modified, :content, :author ] }
  #
- # ======== Using a Hash
+ # ===== Using a Hash
  #
  # The :columns option can be a hash of column names to model attribute name
  # mappings. This gives you finer grained control over what fields are updated
@@ -384,6 +384,7 @@ class ActiveRecord::Base
  # * failed_instances - an array of objects that fails validation and were not committed to the database. An empty array if no validation is performed.
  # * num_inserts - the number of insert statements it took to import the data
  # * ids - the primary keys of the imported ids if the adapter supports it, otherwise an empty array.
+ # * results - import results if the adapter supports it, otherwise an empty array.
  def import(*args)
  if args.first.is_a?( Array ) && args.first.first.is_a?(ActiveRecord::Base)
  options = {}
@@ -445,6 +446,10 @@ class ActiveRecord::Base
  end
 
  array_of_attributes = models.map do |model|
+ if support_setting_primary_key_of_imported_objects?
+ load_association_ids(model)
+ end
+
  column_names.map do |name|
  if stored_attrs.key?(name.to_sym) ||
  serialized_attrs.key?(name) ||
@@ -525,8 +530,7 @@ class ActiveRecord::Base
  import_with_validations( column_names, array_of_attributes, options )
  end
  else
- (num_inserts, ids) = import_without_validations_or_callbacks( column_names, array_of_attributes, options )
- ActiveRecord::Import::Result.new([], num_inserts, ids)
+ import_without_validations_or_callbacks( column_names, array_of_attributes, options )
  end
 
  if options[:synchronize]
@@ -537,7 +541,7 @@ class ActiveRecord::Base
 
  # if we have ids, then set the id on the models and mark the models as clean.
  if models && support_setting_primary_key_of_imported_objects?
- set_attributes_and_mark_clean(models, return_obj, timestamps)
+ set_attributes_and_mark_clean(models, return_obj, timestamps, options)
 
  # if there are auto-save associations on the models we imported that are new, import them as well
  import_associations(models, options.dup) if options[:recursive]
@@ -566,8 +570,8 @@ class ActiveRecord::Base
 
  # keep track of the instance and the position it is currently at. if this fails
  # validation we'll use the index to remove it from the array_of_attributes
- model = new
  arr.each_with_index do |hsh, i|
+ model = new
  hsh.each_pair { |k, v| model[k] = v }
  next if validator.valid_model? model
  raise(ActiveRecord::RecordInvalid, model) if options[:raise_error]
@@ -580,12 +584,12 @@ class ActiveRecord::Base
 
  array_of_attributes.compact!
 
- num_inserts, ids = if array_of_attributes.empty? || options[:all_or_none] && failed_instances.any?
- [0, []]
+ result = if array_of_attributes.empty? || options[:all_or_none] && failed_instances.any?
+ ActiveRecord::Import::Result.new([], 0, [], [])
  else
  import_without_validations_or_callbacks( column_names, array_of_attributes, options )
  end
- ActiveRecord::Import::Result.new(failed_instances, num_inserts, ids)
+ ActiveRecord::Import::Result.new(failed_instances, result.num_inserts, result.ids, result.results)
  end
 
  # Imports the passed in +column_names+ and +array_of_attributes+
@@ -627,6 +631,7 @@ class ActiveRecord::Base
 
  number_inserted = 0
  ids = []
+ results = []
  if supports_import?
  # generate the sql
  post_sql_statements = connection.post_sql_statements( quoted_table_name, options )
@@ -637,9 +642,10 @@ class ActiveRecord::Base
  result = connection.insert_many( [insert_sql, post_sql_statements].flatten,
  batch_values,
  options,
- "#{self.class.name} Create Many Without Validations Or Callbacks" )
- number_inserted += result[0]
- ids += result[1]
+ "#{model_name} Create Many Without Validations Or Callbacks" )
+ number_inserted += result.num_inserts
+ ids += result.ids
+ results += result.results
  end
  else
  transaction(requires_new: true) do
@@ -649,12 +655,12 @@ class ActiveRecord::Base
  end
  end
  end
- [number_inserted, ids]
+ ActiveRecord::Import::Result.new([], number_inserted, ids, results)
  end
 
  private
 
- def set_attributes_and_mark_clean(models, import_result, timestamps)
+ def set_attributes_and_mark_clean(models, import_result, timestamps, options)
  return if models.nil?
  models -= import_result.failed_instances
 
@@ -670,6 +676,22 @@ class ActiveRecord::Base
  end
  end
 
+ if models.size == import_result.results.size
+ columns = Array(options[:returning])
+ single_column = "#{columns.first}=" if columns.size == 1
+ import_result.results.each_with_index do |result, index|
+ model = models[index]
+
+ if single_column
+ model.send(single_column, result)
+ else
+ columns.each_with_index do |column, col_index|
+ model.send("#{column}=", result[col_index])
+ end
+ end
+ end
+ end
+
  models.each do |model|
  if model.respond_to?(:clear_changes_information) # Rails 4.0 and higher
  model.clear_changes_information
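The hunk above is what writes RETURNING values back onto the in-memory models. A hedged end-to-end sketch of the effect, using the `Promotion` model from the new PostgreSQL specs later in this diff (column names come from that spec; PostgreSQL 9.5+ is assumed for the upsert):

```ruby
# Sketch only: Promotion has code, discount and description columns per the specs below.
promotion = Promotion.new(code: "abc", description: "ABC discount")

Promotion.import([promotion],
  on_duplicate_key_update: { conflict_target: %i(code), columns: %i(description) },
  returning: %w(discount))

promotion.discount # => the stored discount, assigned from the RETURNING clause
```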
@@ -680,6 +702,18 @@ class ActiveRecord::Base
  end
  end
 
+ # Sync belongs_to association ids with foreign key field
+ def load_association_ids(model)
+ association_reflections = model.class.reflect_on_all_associations(:belongs_to)
+ association_reflections.each do |association_reflection|
+ association = model.association(association_reflection.name)
+ association = association.target
+ if association && association.id
+ model.public_send("#{association_reflection.foreign_key}=", association.id)
+ end
+ end
+ end
+
  def import_associations(models, options)
  # now, for all the dirty associations, collect them into a new set of models, then recurse.
  # notes:
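`load_association_ids` is what makes \#448 work: assigning a persisted object to a `belongs_to` association is now enough for the foreign key column to be filled in during import. A hedged sketch using the `Topic`/`Book` models from the recursive-import specs below:

```ruby
# Sketch only: in the test suite used by this gem, Book belongs_to :topic.
topic = Topic.create!(title: "LDAP", author_name: "Big Bird", content: "Directories")
book  = Book.new(title: "It", author_name: "King")
book.topic = topic           # foreign key not set explicitly

Book.import([book], validate: false)
book.topic_id                # => topic.id, copied from the association before insert
```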
@@ -754,7 +788,9 @@ class ActiveRecord::Base
  if serialized_attributes.include?(column.name)
  val = serialized_attributes[column.name].dump(val)
  end
- connection_memo.quote(column.type_cast(val), column)
+ # Fixes #443 to support binary (i.e. bytea) columns on PG
+ val = column.type_cast(val) unless column.type.to_sym == :binary
+ connection_memo.quote(val, column)
  end
  end
  end
@@ -1,5 +1,5 @@
  module ActiveRecord
  module Import
- VERSION = "0.19.1".freeze
+ VERSION = "0.20.0".freeze
  end
  end
@@ -121,12 +121,12 @@ describe "#import" do
  end
 
  context "with :validation option" do
- let(:columns) { %w(title author_name) }
- let(:valid_values) { [["LDAP", "Jerry Carter"], ["Rails Recipes", "Chad Fowler"]] }
- let(:valid_values_with_context) { [[1111, "Jerry Carter"], [2222, "Chad Fowler"]] }
- let(:invalid_values) { [["The RSpec Book", ""], ["Agile+UX", ""]] }
- let(:valid_models) { valid_values.map { |title, author_name| Topic.new(title: title, author_name: author_name) } }
- let(:invalid_models) { invalid_values.map { |title, author_name| Topic.new(title: title, author_name: author_name) } }
+ let(:columns) { %w(title author_name content) }
+ let(:valid_values) { [["LDAP", "Jerry Carter", "Putting Directories to Work."], ["Rails Recipes", "Chad Fowler", "A trusted collection of solutions."]] }
+ let(:valid_values_with_context) { [[1111, "Jerry Carter", "1111"], [2222, "Chad Fowler", "2222"]] }
+ let(:invalid_values) { [["The RSpec Book", "David Chelimsky", "..."], ["Agile+UX", "", "All about Agile in UX."]] }
+ let(:valid_models) { valid_values.map { |title, author_name, content| Topic.new(title: title, author_name: author_name, content: content) } }
+ let(:invalid_models) { invalid_values.map { |title, author_name, content| Topic.new(title: title, author_name: author_name, content: content) } }
 
  context "with validation checks turned off" do
  it "should import valid data" do
@@ -2,6 +2,8 @@ class Topic < ActiveRecord::Base
  validates_presence_of :author_name
  validates :title, numericality: { only_integer: true }, on: :context_test
  validates :title, uniqueness: true
+ validates :content, uniqueness: true
+ validates :word_count, numericality: { greater_than: 0 }, if: :content?
 
  validate -> { errors.add(:title, :validate_failed) if title == 'validate_failed' }
  before_validation -> { errors.add(:title, :invalid) if title == 'invalid' }
@@ -10,4 +12,10 @@ class Topic < ActiveRecord::Base
  belongs_to :parent, class_name: "Topic"
 
  composed_of :description, mapping: [%w(title title), %w(author_name author_name)], allow_nil: true, class_name: "TopicDescription"
+
+ private
+
+ def word_count
+ @word_count ||= content.to_s.scan(/\w+/).count
+ end
  end
@@ -36,6 +36,7 @@ ActiveRecord::Schema.define do
  t.column :alarm_type, :integer, null: false
  t.column :status, :integer, null: false
  t.column :metadata, :text
+ t.column :secret_key, :binary
  t.datetime :created_at
  t.datetime :updated_at
  end
@@ -15,6 +15,7 @@ FactoryGirl.define do
  factory :topic do
  sequence(:title) { |n| "Title #{n}" }
  sequence(:author_name) { |n| "Author #{n}" }
+ sequence(:content) { |n| "Content #{n}" }
  end
 
  factory :widget do
@@ -64,8 +64,8 @@ def should_support_mysql_import_functionality
  let(:columns) { %w(id author_name title) }
 
  setup do
- topics << Topic.create!(title: "LDAP", author_name: "Big Bird")
- topics << Topic.create!(title: "Rails Recipes", author_name: "Elmo")
+ topics << Topic.create!(title: "LDAP", author_name: "Big Bird", content: "Putting Directories to Work.")
+ topics << Topic.create!(title: "Rails Recipes", author_name: "Elmo", content: "A trusted collection of solutions.")
  end
 
  it "synchronizes passed in ActiveRecord model instances with the data just imported" do
@@ -85,6 +85,76 @@ def should_support_postgresql_import_functionality
  assert_equal [], Book.import(books, no_returning: true).ids
  end
  end
+
+ describe "returning" do
+ let(:books) { [Book.new(author_name: "King", title: "It")] }
+ let(:result) { Book.import(books, returning: %w(author_name title)) }
+ let(:book_id) do
+ if RUBY_PLATFORM == 'java' || ENV['AR_VERSION'].to_i >= 5.0
+ books.first.id
+ else
+ books.first.id.to_s
+ end
+ end
+
+ it "creates records" do
+ assert_difference("Book.count", +1) { result }
+ end
+
+ it "returns ids" do
+ result
+ assert_equal [book_id], result.ids
+ end
+
+ it "returns specified columns" do
+ assert_equal [%w(King It)], result.results
+ end
+
+ context "when primary key and returning overlap" do
+ let(:result) { Book.import(books, returning: %w(id title)) }
+
+ setup { result }
+
+ it "returns ids" do
+ assert_equal [book_id], result.ids
+ end
+
+ it "returns specified columns" do
+ assert_equal [[book_id, 'It']], result.results
+ end
+ end
+
+ context "setting model attributes" do
+ let(:code) { 'abc' }
+ let(:discount) { 0.10 }
+ let(:original_promotion) do
+ Promotion.new(code: code, discount: discount)
+ end
+ let(:updated_promotion) do
+ Promotion.new(code: code, description: 'ABC discount')
+ end
+ let(:returning_columns) { %w(discount) }
+
+ setup do
+ Promotion.import([original_promotion])
+ Promotion.import([updated_promotion],
+ on_duplicate_key_update: { conflict_target: %i(code), columns: %i(description) },
+ returning: returning_columns)
+ end
+
+ it "sets model attributes" do
+ assert_equal updated_promotion.discount, discount
+ end
+
+ context "returning multiple columns" do
+ let(:returning_columns) { %w(discount description) }
+
+ it "sets model attributes" do
+ assert_equal updated_promotion.discount, discount
+ end
+ end
+ end
+ end
  end
 
  if ENV['AR_VERSION'].to_f >= 4.0
@@ -159,6 +229,17 @@ def should_support_postgresql_import_functionality
  end
  end
  end
+
+ describe "with binary field" do
+ let(:binary_value) { "\xE0'c\xB2\xB0\xB3Bh\\\xC2M\xB1m\\I\xC4r".force_encoding('ASCII-8BIT') }
+ it "imports the correct values for binary fields" do
+ alarms = [Alarm.new(device_id: 1, alarm_type: 1, status: 1, secret_key: binary_value)]
+ assert_difference "Alarm.count", +1 do
+ Alarm.import alarms
+ end
+ assert_equal(binary_value, Alarm.first.secret_key)
+ end
+ end
  end
 
  def should_support_postgresql_upsert_functionality
@@ -102,6 +102,19 @@ def should_support_recursive_import
  end
  end
 
+ it "imports an imported belongs_to association id" do
+ books = new_topics[0].books.to_a
+ Topic.import new_topics, validate: false
+
+ assert_difference "Book.count", books.size do
+ Book.import books, validate: false
+ end
+
+ books.each do |book|
+ assert_not_nil book.topic_id
+ end
+ end
+
  unless ENV["SKIP_COMPOSITE_PK"]
  describe "with composite primary keys" do
  it "should import models and set id" do
@@ -63,7 +63,8 @@ describe ActiveRecord::Import::ValueSetsBytesParser do
  values = [
  "('1','2','3')",
  "('4','5','6')",
- "('7','8','9')"]
+ "('7','8','9')"
+ ]
 
  base_sql_size_in_bytes = 15
  max_bytes = 30
@@ -88,7 +89,8 @@ describe ActiveRecord::Import::ValueSetsBytesParser do
  # each accented e should be 2 bytes, so each entry is 6 bytes instead of 5
  values = [
  "('é')",
- "('é')"]
+ "('é')"
+ ]
 
  base_sql_size_in_bytes = 15
  max_bytes = 26
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: activerecord-import
  version: !ruby/object:Gem::Version
- version: 0.19.1
+ version: 0.20.0
  platform: ruby
  authors:
  - Zach Dennis
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-07-20 00:00:00.000000000 Z
+ date: 2017-09-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: activerecord