sequel 5.68.0 → 5.77.0

Files changed (87)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG +134 -0
  3. data/README.rdoc +3 -3
  4. data/doc/mass_assignment.rdoc +1 -1
  5. data/doc/migration.rdoc +15 -0
  6. data/doc/opening_databases.rdoc +12 -3
  7. data/doc/release_notes/5.69.0.txt +26 -0
  8. data/doc/release_notes/5.70.0.txt +35 -0
  9. data/doc/release_notes/5.71.0.txt +21 -0
  10. data/doc/release_notes/5.72.0.txt +33 -0
  11. data/doc/release_notes/5.73.0.txt +66 -0
  12. data/doc/release_notes/5.74.0.txt +45 -0
  13. data/doc/release_notes/5.75.0.txt +35 -0
  14. data/doc/release_notes/5.76.0.txt +86 -0
  15. data/doc/release_notes/5.77.0.txt +63 -0
  16. data/doc/sharding.rdoc +3 -1
  17. data/doc/testing.rdoc +4 -2
  18. data/lib/sequel/adapters/ibmdb.rb +1 -1
  19. data/lib/sequel/adapters/jdbc/h2.rb +3 -0
  20. data/lib/sequel/adapters/jdbc/hsqldb.rb +2 -0
  21. data/lib/sequel/adapters/jdbc/postgresql.rb +3 -0
  22. data/lib/sequel/adapters/jdbc/sqlanywhere.rb +15 -0
  23. data/lib/sequel/adapters/jdbc/sqlserver.rb +4 -0
  24. data/lib/sequel/adapters/jdbc.rb +10 -6
  25. data/lib/sequel/adapters/mysql.rb +19 -7
  26. data/lib/sequel/adapters/mysql2.rb +2 -2
  27. data/lib/sequel/adapters/odbc/mssql.rb +1 -1
  28. data/lib/sequel/adapters/postgres.rb +6 -5
  29. data/lib/sequel/adapters/shared/db2.rb +12 -0
  30. data/lib/sequel/adapters/shared/mssql.rb +1 -1
  31. data/lib/sequel/adapters/shared/mysql.rb +31 -1
  32. data/lib/sequel/adapters/shared/oracle.rb +4 -6
  33. data/lib/sequel/adapters/shared/postgres.rb +79 -4
  34. data/lib/sequel/adapters/shared/sqlanywhere.rb +10 -4
  35. data/lib/sequel/adapters/shared/sqlite.rb +20 -3
  36. data/lib/sequel/adapters/sqlite.rb +42 -3
  37. data/lib/sequel/adapters/trilogy.rb +117 -0
  38. data/lib/sequel/connection_pool/sharded_threaded.rb +11 -10
  39. data/lib/sequel/connection_pool/sharded_timed_queue.rb +374 -0
  40. data/lib/sequel/connection_pool/threaded.rb +6 -0
  41. data/lib/sequel/connection_pool/timed_queue.rb +16 -3
  42. data/lib/sequel/connection_pool.rb +10 -1
  43. data/lib/sequel/database/connecting.rb +1 -1
  44. data/lib/sequel/database/misc.rb +2 -2
  45. data/lib/sequel/database/schema_methods.rb +9 -2
  46. data/lib/sequel/database/transactions.rb +6 -0
  47. data/lib/sequel/dataset/actions.rb +8 -6
  48. data/lib/sequel/dataset/features.rb +10 -1
  49. data/lib/sequel/dataset/sql.rb +47 -34
  50. data/lib/sequel/extensions/any_not_empty.rb +2 -2
  51. data/lib/sequel/extensions/async_thread_pool.rb +3 -2
  52. data/lib/sequel/extensions/auto_cast_date_and_time.rb +94 -0
  53. data/lib/sequel/extensions/connection_expiration.rb +15 -9
  54. data/lib/sequel/extensions/connection_validator.rb +15 -10
  55. data/lib/sequel/extensions/duplicate_columns_handler.rb +10 -9
  56. data/lib/sequel/extensions/index_caching.rb +5 -1
  57. data/lib/sequel/extensions/migration.rb +52 -13
  58. data/lib/sequel/extensions/named_timezones.rb +1 -1
  59. data/lib/sequel/extensions/pg_array.rb +10 -0
  60. data/lib/sequel/extensions/pg_auto_parameterize_in_array.rb +110 -0
  61. data/lib/sequel/extensions/pg_extended_date_support.rb +4 -4
  62. data/lib/sequel/extensions/pg_json_ops.rb +52 -0
  63. data/lib/sequel/extensions/pg_range.rb +2 -2
  64. data/lib/sequel/extensions/pg_timestamptz.rb +27 -3
  65. data/lib/sequel/extensions/round_timestamps.rb +1 -1
  66. data/lib/sequel/extensions/schema_caching.rb +1 -1
  67. data/lib/sequel/extensions/server_block.rb +2 -1
  68. data/lib/sequel/extensions/transaction_connection_validator.rb +78 -0
  69. data/lib/sequel/model/associations.rb +9 -2
  70. data/lib/sequel/model/base.rb +25 -12
  71. data/lib/sequel/model/dataset_module.rb +3 -0
  72. data/lib/sequel/model/exceptions.rb +15 -3
  73. data/lib/sequel/plugins/column_encryption.rb +27 -6
  74. data/lib/sequel/plugins/defaults_setter.rb +16 -0
  75. data/lib/sequel/plugins/list.rb +5 -2
  76. data/lib/sequel/plugins/mssql_optimistic_locking.rb +8 -38
  77. data/lib/sequel/plugins/optimistic_locking.rb +9 -42
  78. data/lib/sequel/plugins/optimistic_locking_base.rb +55 -0
  79. data/lib/sequel/plugins/paged_operations.rb +181 -0
  80. data/lib/sequel/plugins/pg_auto_constraint_validations.rb +5 -1
  81. data/lib/sequel/plugins/pg_xmin_optimistic_locking.rb +109 -0
  82. data/lib/sequel/plugins/rcte_tree.rb +7 -4
  83. data/lib/sequel/plugins/static_cache.rb +38 -0
  84. data/lib/sequel/plugins/static_cache_cache.rb +5 -1
  85. data/lib/sequel/plugins/validation_helpers.rb +1 -1
  86. data/lib/sequel/version.rb +1 -1
  87. metadata +43 -3
data/lib/sequel/model/exceptions.rb
@@ -24,11 +24,23 @@ module Sequel
   UndefinedAssociation = Class.new(Error)
   ).name
 
-  (
   # Raised when a mass assignment method is called in strict mode with either a restricted column
   # or a column without a setter method.
-  MassAssignmentRestriction = Class.new(Error)
-  ).name
+  class MassAssignmentRestriction < Error
+    # The Sequel::Model object related to this exception.
+    attr_reader :model
+
+    # The column related to this exception, as a string.
+    attr_reader :column
+
+    # Create an instance of this class with the model and column set.
+    def self.create(msg, model, column)
+      e = new("#{msg} for class #{model.class.inspect}")
+      e.instance_variable_set(:@model, model)
+      e.instance_variable_set(:@column, column)
+      e
+    end
+  end
 
   # Exception class raised when +raise_on_save_failure+ is set and validation fails
   class ValidationFailed < Error
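With this change, code that rescues a mass assignment error can inspect which object and column triggered it. A minimal sketch (the album instance and params hash here are hypothetical, not part of the diff):

  begin
    album.set(params)  # strict mode; params contains a restricted column or one without a setter
  rescue Sequel::MassAssignmentRestriction => e
    e.model   # the model instance the assignment was attempted on
    e.column  # the offending column, as a string
  end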
data/lib/sequel/plugins/column_encryption.rb
@@ -31,7 +31,6 @@ rescue RuntimeError, OpenSSL::Cipher::CipherError
 # :nocov:
 end
 
-require 'base64'
 require 'securerandom'
 
 module Sequel
@@ -326,7 +325,7 @@ module Sequel
    #   DB.alter_table(:ce_test) do
    #     c = Sequel[:encrypted_column_name]
    #     add_constraint(:enc_base64) do
-   #       octet_length(decode(regexp_replace(regexp_replace(c, '_', '/', 'g'), '-', '+', 'g'), 'base64')) >= 65}
+   #       octet_length(decode(regexp_replace(regexp_replace(c, '_', '/', 'g'), '-', '+', 'g'), 'base64')) >= 65
    #     end
    #   end
    #
@@ -375,7 +374,7 @@ module Sequel
       # Decrypt using any supported format and any available key.
       def decrypt(data)
         begin
-          data = Base64.urlsafe_decode64(data)
+          data = urlsafe_decode64(data)
         rescue ArgumentError
           raise Error, "Unable to decode encrypted column: invalid base64"
         end
@@ -448,7 +447,7 @@ module Sequel
       # The prefix string of columns for the given search type and the first configured encryption key.
       # Used to find values that do not use this prefix in order to perform reencryption.
       def current_key_prefix(search_type)
-        Base64.urlsafe_encode64("#{search_type.chr}\0#{@key_id.chr}")
+        urlsafe_encode64("#{search_type.chr}\0#{@key_id.chr}")
       end
 
       # The prefix values to search for the given data (an array of strings), assuming the column uses
@@ -472,11 +471,33 @@ module Sequel
 
       private
 
+      if RUBY_VERSION >= '2.4'
+        def decode64(str)
+          str.unpack1("m0")
+        end
+      # :nocov:
+      else
+        def decode64(str)
+          str.unpack("m0")[0]
+        end
+      # :nocov:
+      end
+
+      def urlsafe_encode64(bin)
+        str = [bin].pack("m0")
+        str.tr!("+/", "-_")
+        str
+      end
+
+      def urlsafe_decode64(str)
+        decode64(str.tr("-_", "+/"))
+      end
+
       # An array of strings, one for each configured encryption key, to find encrypted values matching
       # the given data and search format.
       def _search_prefixes(data, search_type)
         @key_map.map do |key_id, (key, _)|
-          Base64.urlsafe_encode64(_search_prefix(data, search_type, key_id, key))
+          urlsafe_encode64(_search_prefix(data, search_type, key_id, key))
         end
       end
 
@@ -509,7 +530,7 @@ module Sequel
         cipher_text << cipher.update(data) if data_size > 0
         cipher_text << cipher.final
 
-        Base64.urlsafe_encode64("#{prefix}#{random_data}#{cipher_iv}#{cipher.auth_tag}#{cipher_text}")
+        urlsafe_encode64("#{prefix}#{random_data}#{cipher_iv}#{cipher.auth_tag}#{cipher_text}")
       end
     end
 
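The private helpers above reimplement the only two Base64 module methods the plugin used, so the plugin no longer needs to require the base64 library. A quick equivalence check, assuming the standard library Base64 module is loaded purely for comparison (not part of the diff):

  require 'base64'

  data = "\xde\xad\xbe\xef".b
  [data].pack("m0").tr("+/", "-_") == Base64.urlsafe_encode64(data)   # => true
  Base64.urlsafe_encode64(data).tr("-_", "+/").unpack1("m0") == data  # => true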
data/lib/sequel/plugins/defaults_setter.rb
@@ -1,5 +1,7 @@
 # frozen-string-literal: true
 
+require 'delegate'
+
 module Sequel
   module Plugins
     # The defaults_setter plugin makes the column getter methods return the default
@@ -106,6 +108,20 @@ module Sequel
           lambda{Date.today}
         when Sequel::CURRENT_TIMESTAMP
           lambda{dataset.current_datetime}
+        when Hash, Array
+          v = Marshal.dump(v).freeze
+          lambda{Marshal.load(v)}
+        when Delegator
+          # DelegateClass returns an anonymous class, which cannot be marshalled, so marshal the
+          # underlying object and create a new instance of the class with the unmarshalled object.
+          klass = v.class
+          case o = v.__getobj__
+          when Hash, Array
+            v = Marshal.dump(o).freeze
+            lambda{klass.new(Marshal.load(v))}
+          else
+            v
+          end
         else
           v
         end
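A Hash or Array default (including one wrapped in a DelegateClass) is now copied per call via a Marshal round trip instead of being shared between instances. A sketch of the intended behavior, assuming a hypothetical Item model whose data column has a {} default:

  Item.plugin :defaults_setter, cache: true
  a = Item.new
  b = Item.new
  a.data['seen'] = true  # mutates only a's cached copy
  b.data                 # => {} -- b still gets an independent copy of the default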
data/lib/sequel/plugins/list.rb
@@ -185,10 +185,13 @@ module Sequel
       end
 
       # Set the value of the position_field to the maximum value plus 1 unless the
-      # position field already has a value.
+      # position field already has a value. If the list is empty, the position will
+      # be set to the model's +top_of_list+ value.
       def before_validation
         unless get_column_value(position_field)
-          set_column_value("#{position_field}=", list_dataset.max(position_field).to_i+1)
+          current_max = list_dataset.max(position_field)
+          value = current_max.nil? ? model.top_of_list : current_max.to_i + 1
+          set_column_value("#{position_field}=", value)
         end
         super
       end
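Previously an empty list always produced a starting position of 1 (nil.to_i + 1); the maximum is now only consulted when a row exists, and an empty list starts at top_of_list. The difference shows up when the top of the list is not 1. A sketch assuming the plugin's :top option (which sets top_of_list) and a hypothetical Task model with the default :position field:

  class Task < Sequel::Model
    plugin :list, top: 0   # zero-based positions
  end

  # With no existing rows, the new row now starts at position 0 rather than 1.
  Task.create(name: 'first').position  # => 0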
data/lib/sequel/plugins/mssql_optimistic_locking.rb
@@ -26,57 +26,27 @@ module Sequel
     module MssqlOptimisticLocking
       # Load the instance_filters plugin into the model.
       def self.apply(model, opts=OPTS)
-        model.plugin :instance_filters
+        model.plugin(:optimistic_locking_base)
       end
 
-      # Set the lock_column to the :lock_column option (default: :timestamp)
+      # Set the lock column
       def self.configure(model, opts=OPTS)
-        model.lock_column = opts[:lock_column] || :timestamp
+        model.lock_column = opts[:lock_column] || model.lock_column || :timestamp
       end
-
-      module ClassMethods
-        # The timestamp/rowversion column containing the version for the current row.
-        attr_accessor :lock_column
-
-        Plugins.inherited_instance_variables(self, :@lock_column=>nil)
-      end
-
+
       module InstanceMethods
-        # Add the lock column instance filter to the object before destroying it.
-        def before_destroy
-          lock_column_instance_filter
-          super
-        end
-
-        # Add the lock column instance filter to the object before updating it.
-        def before_update
-          lock_column_instance_filter
-          super
-        end
-
         private
 
-        # Add the lock column instance filter to the object.
-        def lock_column_instance_filter
-          lc = model.lock_column
-          instance_filter(lc=>Sequel.blob(get_column_value(lc)))
-        end
-
-        # Clear the instance filters when refreshing, so that attempting to
-        # refresh after a failed save removes the previous lock column filter
-        # (the new one will be added before updating).
-        def _refresh(ds)
-          clear_instance_filters
-          super
+        # Make the instance filter value a blob.
+        def lock_column_instance_filter_value
+          Sequel.blob(super)
         end
 
         # Remove the lock column from the columns to update.
         # SQL Server automatically updates the lock column value, and does not like
         # it to be assigned.
         def _save_update_all_columns_hash
-          v = @values.dup
-          cc = changed_columns
-          Array(primary_key).each{|x| v.delete(x) unless cc.include?(x)}
+          v = super
           v.delete(model.lock_column)
           v
         end
data/lib/sequel/plugins/optimistic_locking.rb
@@ -12,64 +12,31 @@ module Sequel
     #   p1 = Person[1]
     #   p2 = Person[1]
     #   p1.update(name: 'Jim') # works
-    #   p2.update(name: 'Bob') # raises Sequel::Plugins::OptimisticLocking::Error
+    #   p2.update(name: 'Bob') # raises Sequel::NoExistingObject
     #
     # In order for this plugin to work, you need to make sure that the database
-    # table has a +lock_version+ column (or other column you name via the lock_column
-    # class level accessor) that defaults to 0.
+    # table has a +lock_version+ column that defaults to 0. To change the column
+    # used, provide a +:lock_column+ option when loading the plugin:
+    #
+    #   plugin :optimistic_locking, lock_column: :version
     #
     # This plugin relies on the instance_filters plugin.
     module OptimisticLocking
       # Exception class raised when trying to update or destroy a stale object.
       Error = Sequel::NoExistingObject
 
-      # Load the instance_filters plugin into the model.
       def self.apply(model, opts=OPTS)
-        model.plugin :instance_filters
+        model.plugin(:optimistic_locking_base)
      end
 
-      # Set the lock_column to the :lock_column option, or :lock_version if
-      # that option is not given.
+      # Set the lock column
       def self.configure(model, opts=OPTS)
-        model.lock_column = opts[:lock_column] || :lock_version
+        model.lock_column = opts[:lock_column] || model.lock_column || :lock_version
       end
-
-      module ClassMethods
-        # The column holding the version of the lock
-        attr_accessor :lock_column
-
-        Plugins.inherited_instance_variables(self, :@lock_column=>nil)
-      end
-
+
       module InstanceMethods
-        # Add the lock column instance filter to the object before destroying it.
-        def before_destroy
-          lock_column_instance_filter
-          super
-        end
-
-        # Add the lock column instance filter to the object before updating it.
-        def before_update
-          lock_column_instance_filter
-          super
-        end
-
         private
 
-        # Add the lock column instance filter to the object.
-        def lock_column_instance_filter
-          lc = model.lock_column
-          instance_filter(lc=>get_column_value(lc))
-        end
-
-        # Clear the instance filters when refreshing, so that attempting to
-        # refresh after a failed save removes the previous lock column filter
-        # (the new one will be added before updating).
-        def _refresh(ds)
-          clear_instance_filters
-          super
-        end
-
         # Only update the row if it has the same lock version, and increment the
         # lock version.
         def _update_columns(columns)
data/lib/sequel/plugins/optimistic_locking_base.rb (new file)
@@ -0,0 +1,55 @@
+# frozen-string-literal: true
+
+module Sequel
+  module Plugins
+    # Base for other optimistic locking plugins
+    module OptimisticLockingBase
+      # Load the instance_filters plugin into the model.
+      def self.apply(model)
+        model.plugin :instance_filters
+      end
+
+      module ClassMethods
+        # The column holding the version of the lock
+        attr_accessor :lock_column
+
+        Plugins.inherited_instance_variables(self, :@lock_column=>nil)
+      end
+
+      module InstanceMethods
+        # Add the lock column instance filter to the object before destroying it.
+        def before_destroy
+          lock_column_instance_filter
+          super
+        end
+
+        # Add the lock column instance filter to the object before updating it.
+        def before_update
+          lock_column_instance_filter
+          super
+        end
+
+        private
+
+        # Add the lock column instance filter to the object.
+        def lock_column_instance_filter
+          instance_filter(model.lock_column=>lock_column_instance_filter_value)
+        end
+
+        # Use the current value of the lock column
+        def lock_column_instance_filter_value
+          public_send(model.lock_column)
+        end
+
+        # Clear the instance filters when refreshing, so that attempting to
+        # refresh after a failed save removes the previous lock column filter
+        # (the new one will be added before updating).
+        def _refresh(ds)
+          clear_instance_filters
+          super
+        end
+      end
+    end
+  end
+end
+
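The base plugin now carries the shared before_update/before_destroy hooks and instance filter logic, so a concrete locking plugin only has to pick the column and, if needed, how its filter value is derived. A hypothetical minimal plugin built on top of it (the names here are illustrative, not part of Sequel):

  module Sequel
    module Plugins
      # Lock on a token column whose value the application rotates itself.
      module TokenOptimisticLocking
        def self.apply(model, opts=OPTS)
          model.plugin(:optimistic_locking_base)
        end

        def self.configure(model, opts=OPTS)
          model.lock_column = opts[:lock_column] || model.lock_column || :lock_token
        end
      end
    end
  end

The base plugin then installs the instance filter on :lock_token before updates and destroys; a full plugin would also advance the value and verify the affected row count, as the bundled optimistic_locking and mssql_optimistic_locking plugins do.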
data/lib/sequel/plugins/paged_operations.rb (new file)
@@ -0,0 +1,181 @@
+# frozen-string-literal: true
+
+module Sequel
+  module Plugins
+    # The paged_operations plugin adds +paged_update+ and
+    # +paged_delete+ dataset methods. These behave similarly to
+    # the default +update+ and +delete+ dataset methods, except
+    # that the update or deletion is done in potentially multiple
+    # queries (by default, affecting 1000 rows per query).
+    # For a large table, this prevents the change from
+    # locking the table for a long period of time.
+    #
+    # Because the point of this is to prevent locking tables for
+    # long periods of time, the separate queries are not contained
+    # in a transaction, which means if a later query fails,
+    # earlier queries will still be committed. You could prevent
+    # this by using a transaction manually, but that defeats the
+    # purpose of using these methods.
+    #
+    # Examples:
+    #
+    #   Album.where{name <= 'M'}.paged_update(updated_at: Sequel::CURRENT_TIMESTAMP)
+    #   # SELECT id FROM albums WHERE (name <= 'M') ORDER BY id LIMIT 1 OFFSET 1000
+    #   # UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE ((name <= 'M') AND ("id" < 1002))
+    #   # SELECT id FROM albums WHERE ((name <= 'M') AND (id >= 1002)) ORDER BY id LIMIT 1 OFFSET 1000
+    #   # UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE ((name <= 'M') AND ("id" < 2002) AND (id >= 1002))
+    #   # ...
+    #   # SELECT id FROM albums WHERE ((name <= 'M') AND (id >= 10002)) ORDER BY id LIMIT 1 OFFSET 1000
+    #   # UPDATE albums SET updated_at = CURRENT_TIMESTAMP WHERE ((name <= 'M') AND (id >= 10002))
+    #
+    #   Album.where{name > 'M'}.paged_delete
+    #   # SELECT id FROM albums WHERE (name > 'M') ORDER BY id LIMIT 1 OFFSET 1000
+    #   # DELETE FROM albums WHERE ((name > 'M') AND (id < 1002))
+    #   # SELECT id FROM albums WHERE (name > 'M') ORDER BY id LIMIT 1 OFFSET 1000
+    #   # DELETE FROM albums WHERE ((name > 'M') AND (id < 2002))
+    #   # ...
+    #   # SELECT id FROM albums WHERE (name > 'M') ORDER BY id LIMIT 1 OFFSET 1000
+    #   # DELETE FROM albums WHERE (name > 'M')
+    #
+    # The plugin also adds a +paged_datasets+ method that will yield
+    # separate datasets limited in size that in total handle all
+    # rows in the receiver:
+    #
+    #   Album.where{name > 'M'}.paged_datasets{|ds| puts ds.sql}
+    #   # Runs: SELECT id FROM albums WHERE (name <= 'M') ORDER BY id LIMIT 1 OFFSET 1000
+    #   # Prints: SELECT * FROM albums WHERE ((name <= 'M') AND ("id" < 1002))
+    #   # Runs: SELECT id FROM albums WHERE ((name <= 'M') AND (id >= 1002)) ORDER BY id LIMIT 1 OFFSET 1000
+    #   # Prints: SELECT * FROM albums WHERE ((name <= 'M') AND ("id" < 2002) AND (id >= 1002))
+    #   # ...
+    #   # Runs: SELECT id FROM albums WHERE ((name <= 'M') AND (id >= 10002)) ORDER BY id LIMIT 1 OFFSET 1000
+    #   # Prints: SELECT * FROM albums WHERE ((name <= 'M') AND (id >= 10002))
+    #
+    # To set the number of rows per page, pass a :rows_per_page option:
+    #
+    #   Album.where{name <= 'M'}.paged_update({x: Sequel[:x] + 1}, rows_per_page: 4)
+    #   # SELECT id FROM albums WHERE (name <= 'M') ORDER BY id LIMIT 1 OFFSET 4
+    #   # UPDATE albums SET x = x + 1 WHERE ((name <= 'M') AND ("id" < 5))
+    #   # SELECT id FROM albums WHERE ((name <= 'M') AND (id >= 5)) ORDER BY id LIMIT 1 OFFSET 4
+    #   # UPDATE albums SET x = x + 1 WHERE ((name <= 'M') AND ("id" < 9) AND (id >= 5))
+    #   # ...
+    #   # SELECT id FROM albums WHERE ((name <= 'M') AND (id >= 12345)) ORDER BY id LIMIT 1 OFFSET 4
+    #   # UPDATE albums SET x = x + 1 WHERE ((name <= 'M') AND (id >= 12345))
+    #
+    # You should avoid using +paged_update+ or +paged_datasets+
+    # with updates that modify the primary key, as such usage is
+    # not supported by this plugin.
+    #
+    # This plugin only supports models with scalar primary keys.
+    #
+    # Usage:
+    #
+    #   # Make all model subclasses support paged update/delete/datasets
+    #   # (called before loading subclasses)
+    #   Sequel::Model.plugin :paged_operations
+    #
+    #   # Make the Album class support paged update/delete/datasets
+    #   Album.plugin :paged_operations
+    module PagedOperations
+      module ClassMethods
+        Plugins.def_dataset_methods(self, [:paged_datasets, :paged_delete, :paged_update])
+      end
+
+      module DatasetMethods
+        # Yield datasets for subsets of the receiver that are limited
+        # to no more than 1000 rows (you can configure the number of
+        # rows using +:rows_per_page+).
+        #
+        # Options:
+        # :rows_per_page :: The maximum number of rows in each yielded dataset
+        #                   (unless concurrent modifications are made to the table).
+        def paged_datasets(opts=OPTS)
+          unless defined?(yield)
+            return enum_for(:paged_datasets, opts)
+          end
+
+          pk = _paged_operations_pk(:paged_update)
+          base_offset_ds = offset_ds = _paged_operations_offset_ds(opts)
+          first = nil
+
+          while last = offset_ds.get(pk)
+            ds = where(pk < last)
+            ds = ds.where(pk >= first) if first
+            yield ds
+            first = last
+            offset_ds = base_offset_ds.where(pk >= first)
+          end
+
+          ds = self
+          ds = ds.where(pk >= first) if first
+          yield ds
+          nil
+        end
+
+        # Delete all rows of the dataset using multiple queries so that
+        # no more than 1000 rows are deleted at a time (you can configure the
+        # number of rows using +:rows_per_page+).
+        #
+        # Options:
+        # :rows_per_page :: The maximum number of rows affected by each DELETE query
+        #                   (unless concurrent modifications are made to the table).
+        def paged_delete(opts=OPTS)
+          if (db.database_type == :oracle && !supports_fetch_next_rows?) || (db.database_type == :mssql && !is_2012_or_later?)
+            raise Error, "paged_delete is not supported on MSSQL/Oracle when using emulated offsets"
+          end
+          pk = _paged_operations_pk(:paged_delete)
+          rows_deleted = 0
+          offset_ds = _paged_operations_offset_ds(opts)
+          while last = offset_ds.get(pk)
+            rows_deleted += where(pk < last).delete
+          end
+          rows_deleted + delete
+        end
+
+        # Update all rows of the dataset using multiple queries so that
+        # no more than 1000 rows are updated at a time (you can configure the
+        # number of rows using +:rows_per_page+). All arguments are
+        # passed to Dataset#update.
+        #
+        # Options:
+        # :rows_per_page :: The maximum number of rows affected by each UPDATE query
+        #                   (unless concurrent modifications are made to the table).
+        def paged_update(values, opts=OPTS)
+          rows_updated = 0
+          paged_datasets(opts) do |ds|
+            rows_updated += ds.update(values)
+          end
+          rows_updated
+        end
+
+        private
+
+        # Run some basic checks common to paged_{datasets,delete,update}
+        # and return the primary key to operate on as a Sequel::Identifier.
+        def _paged_operations_pk(meth)
+          raise Error, "cannot use #{meth} if dataset has a limit or offset" if @opts[:limit] || @opts[:offset]
+          if db.database_type == :db2 && db.offset_strategy == :emulate
+            raise Error, "the paged_operations plugin is not supported on DB2 when using emulated offsets, set the :offset_strategy Database option to 'limit_offset' or 'offset_fetch'"
+          end
+
+          case pk = model.primary_key
+          when Symbol
+            Sequel.identifier(pk)
+          when Array
+            raise Error, "cannot use #{meth} on a model with a composite primary key"
+          else
+            raise Error, "cannot use #{meth} on a model without a primary key"
+          end
+        end
+
+        # The dataset that will be used by paged_{datasets,delete,update}
+        # to get the upper limit for the next query.
+        def _paged_operations_offset_ds(opts)
+          if rows_per_page = opts[:rows_per_page]
+            raise Error, ":rows_per_page option must be at least 1" unless rows_per_page >= 1
+          end
+          _force_primary_key_order.offset(rows_per_page || 1000)
+        end
+      end
+    end
+  end
+end
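Since paged_datasets returns an Enumerator when no block is given, the page-by-page work can also be driven externally. A small usage sketch against the Album model used in the comments above (illustrative only):

  Album.plugin :paged_operations

  # Count matching rows page by page instead of in one long-running query.
  total = Album.where{name > 'M'}.paged_datasets.sum(&:count)

  # Same delete as the example above, but at most 100 rows per DELETE.
  Album.where{name > 'M'}.paged_delete(rows_per_page: 100)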
data/lib/sequel/plugins/pg_auto_constraint_validations.rb
@@ -133,7 +133,11 @@ module Sequel
       # Dump the in-memory cached metadata to the cache file.
       def dump_pg_auto_constraint_validations_cache
         raise Error, "No pg_auto_constraint_validations setup" unless file = @pg_auto_constraint_validations_cache_file
-        File.open(file, 'wb'){|f| f.write(Marshal.dump(@pg_auto_constraint_validations_cache))}
+        pg_auto_constraint_validations_cache = {}
+        @pg_auto_constraint_validations_cache.sort.each do |k, v|
+          pg_auto_constraint_validations_cache[k] = v
+        end
+        File.open(file, 'wb'){|f| f.write(Marshal.dump(pg_auto_constraint_validations_cache))}
         nil
       end
 
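Sorting the cached metadata before dumping makes the cache file reproducible: regenerating it against an unchanged schema writes identical bytes, so a committed cache file shows no spurious diffs. For example (Album here stands for whatever model loaded the plugin with a :cache_file option):

  # Repeated runs now write identical cache files.
  Album.dump_pg_auto_constraint_validations_cache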
data/lib/sequel/plugins/pg_xmin_optimistic_locking.rb (new file)
@@ -0,0 +1,109 @@
+# frozen-string-literal: true
+
+module Sequel
+  module Plugins
+    # This plugin implements an optimistic locking mechanism on PostgreSQL based
+    # on the xmin of the row. The xmin system column is automatically set to
+    # the current transaction id whenever the row is inserted or updated:
+    #
+    #   class Person < Sequel::Model
+    #     plugin :pg_xmin_optimistic_locking
+    #   end
+    #   p1 = Person[1]
+    #   p2 = Person[1]
+    #   p1.update(name: 'Jim') # works
+    #   p2.update(name: 'Bob') # raises Sequel::NoExistingObject
+    #
+    # The advantage of the pg_xmin_optimistic_locking plugin compared to the
+    # regular optimistic_locking plugin is that it does not require any
+    # additional columns setup on the model. This allows it to be loaded
+    # in the base model and have all subclasses automatically use
+    # optimistic locking. The disadvantage is that testing can be
+    # more difficult if you are modifying the underlying row between
+    # when a model is retrieved and when it is saved.
+    #
+    # This plugin may not work with the class_table_inheritance plugin.
+    #
+    # This plugin relies on the instance_filters plugin.
+    module PgXminOptimisticLocking
+      WILDCARD = LiteralString.new('*').freeze
+
+      # Define the xmin column accessor
+      def self.apply(model)
+        model.instance_exec do
+          plugin(:optimistic_locking_base)
+          @lock_column = :xmin
+          def_column_accessor(:xmin)
+        end
+      end
+
+      # Update the dataset to append the xmin column if it is usable
+      # and there is a dataset for the model.
+      def self.configure(model)
+        model.instance_exec do
+          set_dataset(@dataset) if @dataset
+        end
+      end
+
+      module ClassMethods
+        private
+
+        # Ensure the dataset selects the xmin column if doing so
+        def convert_input_dataset(ds)
+          append_xmin_column_if_usable(super)
+        end
+
+        # If the xmin column is not already selected, and selecting it does not
+        # raise an error, append it to the selections.
+        def append_xmin_column_if_usable(ds)
+          select = ds.opts[:select]
+
+          unless select && select.include?(:xmin)
+            xmin_ds = ds.select_append(:xmin)
+            begin
+              columns = xmin_ds.columns!
+            rescue Sequel::DatabaseConnectionError, Sequel::DatabaseDisconnectError
+              raise
+            rescue Sequel::DatabaseError
+              # ignore, could be view, subquery, table returning function, etc.
+            else
+              ds = xmin_ds if columns.include?(:xmin)
+            end
+          end
+
+          ds
+        end
+      end
+
+      module InstanceMethods
+        private
+
+        # Only set the lock column instance filter if there is an xmin value.
+        def lock_column_instance_filter
+          super if @values[:xmin]
+        end
+
+        # Include xmin value when inserting initial row
+        def _insert_dataset
+          super.returning(WILDCARD, :xmin)
+        end
+
+        # Remove the xmin from the columns to update.
+        # PostgreSQL automatically updates the xmin value, and it cannot be assigned.
+        def _save_update_all_columns_hash
+          v = super
+          v.delete(:xmin)
+          v
+        end
+
+        # Add a RETURNING clause to fetch the updated xmin when updating the row.
+        def _update_without_checking(columns)
+          ds = _update_dataset
+          rows = ds.clone(ds.send(:default_server_opts, :sql=>ds.returning(:xmin).update_sql(columns))).all
+          values[:xmin] = rows.first[:xmin] unless rows.empty?
+          rows.length
+        end
+      end
+    end
+  end
+end
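A sketch of the stale-object behavior described above, reusing the Person model from the example in the plugin comments (the name column is illustrative):

  p = Person[1]
  # Some other connection updates the row, which changes its xmin.
  Person.dataset.where(id: 1).update(name: 'Someone Else')
  p.update(name: 'Jim')  # raises Sequel::NoExistingObject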