sequel 5.67.0 → 5.74.0

Files changed (68)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG +86 -0
  3. data/README.rdoc +3 -3
  4. data/doc/advanced_associations.rdoc +3 -1
  5. data/doc/mass_assignment.rdoc +1 -1
  6. data/doc/migration.rdoc +15 -0
  7. data/doc/opening_databases.rdoc +8 -1
  8. data/doc/release_notes/5.68.0.txt +61 -0
  9. data/doc/release_notes/5.69.0.txt +26 -0
  10. data/doc/release_notes/5.70.0.txt +35 -0
  11. data/doc/release_notes/5.71.0.txt +21 -0
  12. data/doc/release_notes/5.72.0.txt +33 -0
  13. data/doc/release_notes/5.73.0.txt +66 -0
  14. data/doc/release_notes/5.74.0.txt +45 -0
  15. data/doc/sharding.rdoc +3 -1
  16. data/doc/testing.rdoc +1 -1
  17. data/lib/sequel/adapters/ibmdb.rb +1 -1
  18. data/lib/sequel/adapters/jdbc/postgresql.rb +3 -0
  19. data/lib/sequel/adapters/jdbc/sqlanywhere.rb +4 -0
  20. data/lib/sequel/adapters/jdbc/sqlserver.rb +4 -0
  21. data/lib/sequel/adapters/jdbc.rb +10 -6
  22. data/lib/sequel/adapters/mysql.rb +19 -7
  23. data/lib/sequel/adapters/shared/db2.rb +12 -0
  24. data/lib/sequel/adapters/shared/postgres.rb +70 -6
  25. data/lib/sequel/adapters/shared/sqlite.rb +0 -1
  26. data/lib/sequel/adapters/trilogy.rb +117 -0
  27. data/lib/sequel/connection_pool/sharded_threaded.rb +11 -10
  28. data/lib/sequel/connection_pool/sharded_timed_queue.rb +374 -0
  29. data/lib/sequel/connection_pool/threaded.rb +6 -0
  30. data/lib/sequel/connection_pool/timed_queue.rb +16 -3
  31. data/lib/sequel/connection_pool.rb +8 -1
  32. data/lib/sequel/database/connecting.rb +1 -1
  33. data/lib/sequel/database/schema_methods.rb +4 -3
  34. data/lib/sequel/database/transactions.rb +6 -0
  35. data/lib/sequel/dataset/actions.rb +8 -6
  36. data/lib/sequel/extensions/async_thread_pool.rb +3 -2
  37. data/lib/sequel/extensions/connection_expiration.rb +15 -9
  38. data/lib/sequel/extensions/connection_validator.rb +15 -10
  39. data/lib/sequel/extensions/index_caching.rb +5 -1
  40. data/lib/sequel/extensions/migration.rb +18 -5
  41. data/lib/sequel/extensions/pg_array.rb +9 -1
  42. data/lib/sequel/extensions/pg_auto_parameterize_in_array.rb +110 -0
  43. data/lib/sequel/extensions/pg_enum.rb +1 -2
  44. data/lib/sequel/extensions/pg_extended_date_support.rb +10 -2
  45. data/lib/sequel/extensions/pg_json_ops.rb +52 -0
  46. data/lib/sequel/extensions/pg_multirange.rb +1 -1
  47. data/lib/sequel/extensions/pg_range.rb +1 -1
  48. data/lib/sequel/extensions/pg_row.rb +2 -6
  49. data/lib/sequel/extensions/schema_caching.rb +1 -1
  50. data/lib/sequel/extensions/server_block.rb +2 -1
  51. data/lib/sequel/model/base.rb +20 -10
  52. data/lib/sequel/model/dataset_module.rb +3 -0
  53. data/lib/sequel/model/exceptions.rb +15 -3
  54. data/lib/sequel/plugins/column_encryption.rb +26 -5
  55. data/lib/sequel/plugins/constraint_validations.rb +8 -5
  56. data/lib/sequel/plugins/defaults_setter.rb +16 -0
  57. data/lib/sequel/plugins/mssql_optimistic_locking.rb +8 -38
  58. data/lib/sequel/plugins/optimistic_locking.rb +9 -42
  59. data/lib/sequel/plugins/optimistic_locking_base.rb +55 -0
  60. data/lib/sequel/plugins/paged_operations.rb +181 -0
  61. data/lib/sequel/plugins/pg_auto_constraint_validations.rb +8 -2
  62. data/lib/sequel/plugins/pg_xmin_optimistic_locking.rb +109 -0
  63. data/lib/sequel/plugins/static_cache.rb +38 -0
  64. data/lib/sequel/plugins/static_cache_cache.rb +5 -1
  65. data/lib/sequel/plugins/validation_helpers.rb +8 -1
  66. data/lib/sequel/plugins/validation_helpers_generic_type_messages.rb +73 -0
  67. data/lib/sequel/version.rb +1 -1
  68. metadata +37 -2

data/lib/sequel/extensions/migration.rb

@@ -159,6 +159,19 @@ module Sequel
  migration.up = block
  migration.down = MigrationReverser.new.reverse(&block)
  end
+
+ # Creates a revert migration. This is the same as creating
+ # the same block with +down+, but it also calls the block and attempts
+ # to create a +up+ block that will reverse the changes made by
+ # the block. This is designed to revert the changes in the
+ # provided block.
+ #
+ # There are no guarantees that this will work perfectly
+ # in all cases, but it works for some simple cases.
+ def revert(&block)
+ migration.down = block
+ migration.up = MigrationReverser.new.reverse(&block)
+ end
  end

  # Handles the reversing of reversible migrations. Basically records
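
A minimal usage sketch of the new revert block (hypothetical table name): the block itself becomes the down migration, and the up migration is generated by reversing it, so applying this migration drops the table and rolling it back recreates it.

    Sequel.migration do
      revert do
        # up: reversal of the block (DROP TABLE artists); down: the block itself
        create_table(:artists) do
          primary_key :id
          String :name, null: false
        end
      end
    end
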
@@ -270,6 +283,10 @@ module Sequel
  def rename_column(name, new_name)
  @actions << [:rename_column, new_name, name]
  end
+
+ def set_column_allow_null(name, allow_null=true)
+ @actions << [:set_column_allow_null, name, !allow_null]
+ end
  end

  # The preferred method for writing Sequel migrations, using a DSL:
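
A short sketch (hypothetical table and column) of what this reverser addition enables: set_column_allow_null can now appear inside an alter_table block in a reversible migration, with the allow_null flag inverted when migrating down.

    Sequel.migration do
      change do
        alter_table(:albums) do
          set_column_allow_null :artist_id, false  # migrating down replays it with allow_null true
        end
      end
    end
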
@@ -478,11 +495,7 @@ module Sequel
  @use_transactions
  end

- if use_trans
- db.transaction(&block)
- else
- yield
- end
+ db.transaction(:skip_transaction=>use_trans == false, &block)
  end

  # Load the migration file, raising an exception if the file does not define
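
Both branches now funnel through Database#transaction, with the :skip_transaction option selecting the non-transactional path. A hedged sketch of that option on its own (DB is a placeholder Database object):

    # With skip_transaction: true, Database#transaction yields the block
    # without wrapping it in BEGIN/COMMIT, so callers can keep a single
    # code path and toggle transactional behavior via the option.
    DB.transaction(skip_transaction: true) do
      # runs without an explicit transaction being started here
    end
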

data/lib/sequel/extensions/pg_array.rb

@@ -233,6 +233,14 @@ module Sequel
  a
  when String
  bound_variable_array_string(a)
+ when Float
+ if a.infinite?
+ a > 0 ? '"Infinity"' : '"-Infinity"'
+ elsif a.nan?
+ '"NaN"'
+ else
+ literal(a)
+ end
  else
  if (s = bound_variable_arg(a, nil)).is_a?(String)
  bound_variable_array_string(s)
@@ -257,7 +265,7 @@ module Sequel
  end

  # Make the column type detection handle registered array types.
- def schema_column_type(db_type)
+ def schema_array_type(db_type)
  if (db_type =~ /\A([^(]+)(?:\([^(]+\))?\[\]\z/io) && (type = pg_array_schema_type($1))
  type
  else

data/lib/sequel/extensions/pg_auto_parameterize_in_array.rb (new file)

@@ -0,0 +1,110 @@
+ # frozen-string-literal: true
+ #
+ # The pg_auto_parameterize_in_array extension builds on the pg_auto_parameterize
+ # extension, adding support for handling additional types when converting from
+ # IN to = ANY and NOT IN to != ALL:
+ #
+ # DB[:table].where(column: [1.0, 2.0, ...])
+ # # Without extension: column IN ($1::numeric, $2::numeric, ...) # bound variables: 1.0, 2.0, ...
+ # # With extension: column = ANY($1::numeric[]) # bound variables: [1.0, 2.0, ...]
+ #
+ # This prevents the use of an unbounded number of bound variables based on the
+ # size of the array, as well as using different SQL for different array sizes.
+ #
+ # The following types are supported when doing the conversions, with the database
+ # type used:
+ #
+ # Float :: if any are infinite or NaN, double precision, otherwise numeric
+ # BigDecimal :: numeric
+ # Date :: date
+ # Time :: timestamp (or timestamptz if pg_timestamptz extension is used)
+ # DateTime :: timestamp (or timestamptz if pg_timestamptz extension is used)
+ # Sequel::SQLTime :: time
+ # Sequel::SQL::Blob :: bytea
+ #
+ # String values are also supported using the +text+ type, but only if the
+ # +:treat_string_list_as_text_array+ Database option is used. This is because
+ # treating strings as text can break programs, since the type for
+ # literal strings in PostgreSQL is +unknown+, not +text+.
+ #
+ # The conversion is only done for single dimensional arrays that have more
+ # than two elements, where all elements are of the same class (other than
+ # nil values).
+ #
+ # Related module: Sequel::Postgres::AutoParameterizeInArray
+
+ module Sequel
+ module Postgres
+ # Enable automatically parameterizing queries.
+ module AutoParameterizeInArray
+ # Transform column IN (...) expressions into column = ANY($)
+ # and column NOT IN (...) expressions into column != ALL($)
+ # using an array bound variable for the ANY/ALL argument,
+ # if all values inside the predicate are of the same type and
+ # the type is handled by the extension.
+ # This is the same optimization PostgreSQL performs internally,
+ # but this reduces the number of bound variables.
+ def complex_expression_sql_append(sql, op, args)
+ case op
+ when :IN, :"NOT IN"
+ l, r = args
+ if auto_param?(sql) && (type = _bound_variable_type_for_array(r))
+ if op == :IN
+ op = :"="
+ func = :ANY
+ else
+ op = :!=
+ func = :ALL
+ end
+ args = [l, Sequel.function(func, Sequel.pg_array(r, type))]
+ end
+ end
+
+ super
+ end
+
+ private
+
+ # The bound variable type string to use for the bound variable array.
+ # Returns nil if a bound variable should not be used for the array.
+ def _bound_variable_type_for_array(r)
+ return unless Array === r && r.size > 1
+ classes = r.map(&:class)
+ classes.uniq!
+ classes.delete(NilClass)
+ return unless classes.size == 1
+
+ klass = classes[0]
+ if klass == Integer
+ # This branch is not taken on Ruby <2.4, because of the Fixnum/Bignum split.
+ # However, that causes no problems as pg_auto_parameterize handles integer
+ # arrays natively (though the SQL used is different)
+ "int8"
+ elsif klass == String
+ "text" if db.typecast_value(:boolean, db.opts[:treat_string_list_as_text_array])
+ elsif klass == BigDecimal
+ "numeric"
+ elsif klass == Date
+ "date"
+ elsif klass == Time
+ @db.cast_type_literal(Time)
+ elsif klass == Float
+ # PostgreSQL treats literal floats as numeric, not double precision
+ # But older versions of PostgreSQL don't handle Infinity/NaN in numeric
+ r.all?{|v| v.nil? || v.finite?} ? "numeric" : "double precision"
+ elsif klass == Sequel::SQLTime
+ "time"
+ elsif klass == DateTime
+ @db.cast_type_literal(DateTime)
+ elsif klass == Sequel::SQL::Blob
+ "bytea"
+ end
+ end
+ end
+ end
+
+ Database.register_extension(:pg_auto_parameterize_in_array) do |db|
+ db.extension(:pg_array, :pg_auto_parameterize)
+ db.extend_datasets(Postgres::AutoParameterizeInArray)
+ end
+ end
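
A hedged usage sketch of the new extension (table and column names are made up); the SQL comment mirrors the behavior described in the header above:

    DB.extension :pg_auto_parameterize_in_array
    DB[:items].where(price: [1.5, 2.5, 3.5]).all
    # price = ANY($1::numeric[])   -- one array bound variable: [1.5, 2.5, 3.5]
    # instead of price IN ($1::numeric, $2::numeric, $3::numeric)
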

data/lib/sequel/extensions/pg_enum.rb

@@ -166,8 +166,7 @@ module Sequel
  def schema_post_process(_)
  super.each do |_, s|
  oid = s[:oid]
- if values = Sequel.synchronize{@enum_labels[oid]}
- s[:type] = :enum
+ if s[:type] == :enum && (values = Sequel.synchronize{@enum_labels[oid]})
  s[:enum_values] = values
  end
  end

data/lib/sequel/extensions/pg_extended_date_support.rb

@@ -22,7 +22,6 @@
  module Sequel
  module Postgres
  module ExtendedDateSupport
- DATE_YEAR_1 = Date.new(1)
  DATETIME_YEAR_1 = DateTime.new(1)
  TIME_YEAR_1 = Time.at(-62135596800).utc
  INFINITE_TIMESTAMP_STRINGS = ['infinity'.freeze, '-infinity'.freeze].freeze
@@ -38,6 +37,15 @@ module Sequel
  procs = db.conversion_procs
  procs[1082] = ::Sequel.method(:string_to_date)
  procs[1184] = procs[1114] = db.method(:to_application_timestamp)
+ if ocps = db.instance_variable_get(:@oid_convertor_map)
+ # Clear the oid convertor map entries for timestamps if they
+ # exist, so it will regenerate new ones that use this extension.
+ # This is only taken when using the jdbc adapter.
+ Sequel.synchronize do
+ ocps.delete(1184)
+ ocps.delete(1114)
+ end
+ end
  end

  # Handle BC dates and times in bound variables. This is necessary for Date values
@@ -181,7 +189,7 @@ module Sequel

  # Handle BC Date objects.
  def literal_date(date)
- if date < DATE_YEAR_1
+ if date.year < 1
  date <<= ((date.year) * 24 - 12)
  date.strftime("'%Y-%m-%d BC'")
  else

data/lib/sequel/extensions/pg_json_ops.rb

@@ -123,6 +123,15 @@
  # c = Sequel.pg_jsonb_op(:c)
  # DB[:t].update(c['key1'] => 1.to_json, c['key2'] => "a".to_json)
  #
+ # On PostgreSQL 16+, the <tt>IS [NOT] JSON</tt> operator is supported:
+ #
+ # j.is_json # j IS JSON
+ # j.is_json(type: :object) # j IS JSON OBJECT
+ # j.is_json(type: :object, unique: true) # j IS JSON OBJECT WITH UNIQUE
+ # j.is_not_json # j IS NOT JSON
+ # j.is_not_json(type: :array) # j IS NOT JSON ARRAY
+ # j.is_not_json(unique: true) # j IS NOT JSON WITH UNIQUE
+ #
  # If you are also using the pg_json extension, you should load it before
  # loading this extension. Doing so will allow you to use the #op method on
  # JSONHash, JSONHarray, JSONBHash, and JSONBArray, allowing you to perform json/jsonb operations
@@ -151,6 +160,18 @@ module Sequel
  GET_PATH = ["(".freeze, " #> ".freeze, ")".freeze].freeze
  GET_PATH_TEXT = ["(".freeze, " #>> ".freeze, ")".freeze].freeze

+ IS_JSON = ["(".freeze, " IS JSON".freeze, "".freeze, ")".freeze].freeze
+ IS_NOT_JSON = ["(".freeze, " IS NOT JSON".freeze, "".freeze, ")".freeze].freeze
+ EMPTY_STRING = Sequel::LiteralString.new('').freeze
+ WITH_UNIQUE = Sequel::LiteralString.new(' WITH UNIQUE').freeze
+ IS_JSON_MAP = {
+ nil => EMPTY_STRING,
+ :value => Sequel::LiteralString.new(' VALUE').freeze,
+ :scalar => Sequel::LiteralString.new(' SCALAR').freeze,
+ :object => Sequel::LiteralString.new(' OBJECT').freeze,
+ :array => Sequel::LiteralString.new(' ARRAY').freeze
+ }.freeze
+
  # Get JSON array element or object field as json. If an array is given,
  # gets the object at the specified path.
  #
@@ -233,6 +254,30 @@ module Sequel
  end
  end

+ # Return whether the json object can be parsed as JSON.
+ #
+ # Options:
+ # :type :: Check whether the json object can be parsed as a specific type
+ # of JSON (:value, :scalar, :object, :array).
+ # :unique :: Check JSON objects for unique keys.
+ #
+ # json_op.is_json # json IS JSON
+ # json_op.is_json(type: :object) # json IS JSON OBJECT
+ # json_op.is_json(unique: true) # json IS JSON WITH UNIQUE
+ def is_json(opts=OPTS)
+ _is_json(IS_JSON, opts)
+ end
+
+ # Return whether the json object cannot be parsed as JSON. The opposite
+ # of #is_json. See #is_json for options.
+ #
+ # json_op.is_not_json # json IS NOT JSON
+ # json_op.is_not_json(type: :object) # json IS NOT JSON OBJECT
+ # json_op.is_not_json(unique: true) # json IS NOT JSON WITH UNIQUE
+ def is_not_json(opts=OPTS)
+ _is_json(IS_NOT_JSON, opts)
+ end
+
  # Returns a set of keys AS text in the json object.
  #
  # json_op.keys # json_object_keys(json)
@@ -286,6 +331,13 @@ module Sequel

  private

+ # Internals of IS [NOT] JSON support
+ def _is_json(lit_array, opts)
+ raise Error, "invalid is_json :type option: #{opts[:type].inspect}" unless type = IS_JSON_MAP[opts[:type]]
+ unique = opts[:unique] ? WITH_UNIQUE : EMPTY_STRING
+ Sequel::SQL::BooleanExpression.new(:NOOP, Sequel::SQL::PlaceholderLiteralString.new(lit_array, [self, type, unique]))
+ end
+
  # Return a placeholder literal with the given str and args, wrapped
  # in an JSONOp or JSONBOp, used by operators that return json or jsonb.
  def json_op(str, args)
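
A short usage sketch for these operators (hypothetical table and column; PostgreSQL 16+ as noted above):

    Sequel.extension :pg_json_ops
    j = Sequel.pg_json_op(:data)
    DB[:docs].where(j.is_json(type: :object, unique: true))
    # roughly: SELECT * FROM docs WHERE (data IS JSON OBJECT WITH UNIQUE)
    DB[:docs].where(j.is_not_json)
    # roughly: SELECT * FROM docs WHERE (data IS NOT JSON)
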

data/lib/sequel/extensions/pg_multirange.rb

@@ -221,7 +221,7 @@ module Sequel
  private

  # Recognize the registered database multirange types.
- def schema_column_type(db_type)
+ def schema_multirange_type(db_type)
  @pg_multirange_schema_types[db_type] || super
  end


data/lib/sequel/extensions/pg_range.rb

@@ -234,7 +234,7 @@ module Sequel
  private

  # Recognize the registered database range types.
- def schema_column_type(db_type)
+ def schema_range_type(db_type)
  @pg_range_schema_types[db_type] || super
  end


data/lib/sequel/extensions/pg_row.rb

@@ -538,12 +538,8 @@ module Sequel
  private

  # Make the column type detection handle registered row types.
- def schema_column_type(db_type)
- if type = @row_schema_types[db_type]
- type
- else
- super
- end
+ def schema_composite_type(db_type)
+ @row_schema_types[db_type] || super
  end
  end
  end

data/lib/sequel/extensions/schema_caching.rb

@@ -52,7 +52,7 @@ module Sequel
  # Dump the cached schema to the filename given in Marshal format.
  def dump_schema_cache(file)
  sch = {}
- @schemas.each do |k,v|
+ @schemas.sort.each do |k,v|
  sch[k] = v.map do |c, h|
  h = Hash[h]
  h.delete(:callable_default)

data/lib/sequel/extensions/server_block.rb

@@ -69,7 +69,8 @@ module Sequel
  # Also defines the with_server method on the receiver for easy use.
  def self.extended(db)
  pool = db.pool
- if defined?(ShardedThreadedConnectionPool) && pool.is_a?(ShardedThreadedConnectionPool)
+ case pool.pool_type
+ when :sharded_threaded, :sharded_timed_queue
  pool.extend(ThreadedServerBlock)
  pool.instance_variable_set(:@default_servers, {})
  else
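
Because the check is now on pool_type, server_block also works with the new sharded_timed_queue connection pool. A hedged sketch (connection URL, shard name, and host are placeholders; assumes the :sharded_timed_queue symbol is registered by the connection_pool change listed above; the timed-queue pools need Ruby 3.2+):

    DB = Sequel.connect('postgres://localhost/app',
      pool_class: :sharded_timed_queue,
      servers: {read_only: {host: 'replica.example.com'}})
    DB.extension :server_block
    DB.with_server(:read_only) do
      DB[:albums].count   # issued against the :read_only shard
    end
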

data/lib/sequel/model/base.rb

@@ -1945,8 +1945,10 @@ module Sequel
  end

  # If transactions should be used, wrap the yield in a transaction block.
- def checked_transaction(opts=OPTS)
- use_transaction?(opts) ? db.transaction({:server=>this_server}.merge!(opts)){yield} : yield
+ def checked_transaction(opts=OPTS, &block)
+ h = {:server=>this_server}.merge!(opts)
+ h[:skip_transaction] = true unless use_transaction?(opts)
+ db.transaction(h, &block)
  end

  # Change the value of the column to given value, recording the change.
@@ -2031,19 +2033,20 @@ module Sequel
  meths = setter_methods(type)
  strict = strict_param_setting
  hash.each do |k,v|
+ k = k.to_s
  m = "#{k}="
  if meths.include?(m)
  set_column_value(m, v)
  elsif strict
  # Avoid using respond_to? or creating symbols from user input
  if public_methods.map(&:to_s).include?(m)
- if Array(model.primary_key).map(&:to_s).member?(k.to_s) && model.restrict_primary_key?
- raise MassAssignmentRestriction, "#{k} is a restricted primary key"
+ if Array(model.primary_key).map(&:to_s).member?(k) && model.restrict_primary_key?
+ raise MassAssignmentRestriction.create("#{k} is a restricted primary key", self, k)
  else
- raise MassAssignmentRestriction, "#{k} is a restricted column"
+ raise MassAssignmentRestriction.create("#{k} is a restricted column", self, k)
  end
  else
- raise MassAssignmentRestriction, "method #{m} doesn't exist"
+ raise MassAssignmentRestriction.create("method #{m} doesn't exist", self, k)
  end
  end
  end
@@ -2147,8 +2150,9 @@ module Sequel
  # # DELETE FROM artists WHERE (id = 2)
  # # ...
  def destroy
- pr = proc{all(&:destroy).length}
- model.use_transactions ? @db.transaction(:server=>opts[:server], &pr) : pr.call
+ @db.transaction(:server=>opts[:server], :skip_transaction=>model.use_transactions == false) do
+ all(&:destroy).length
+ end
  end

  # If there is no order already defined on this dataset, order it by
@@ -2228,11 +2232,17 @@ module Sequel

  private

+ # Return the dataset ordered by the model's primary key. This should not
+ # be used if the model does not have a primary key.
+ def _force_primary_key_order
+ cached_dataset(:_pk_order_ds){order(*model.primary_key)}
+ end
+
  # If the dataset is not already ordered, and the model has a primary key,
  # return a clone ordered by the primary key.
  def _primary_key_order
- if @opts[:order].nil? && model && (pk = model.primary_key)
- cached_dataset(:_pk_order_ds){order(*pk)}
+ if @opts[:order].nil? && model && model.primary_key
+ _force_primary_key_order
  end
  end


data/lib/sequel/model/dataset_module.rb

@@ -8,6 +8,9 @@ module Sequel
  # automatically creates class methods for public dataset
  # methods.
  class DatasetModule < Dataset::DatasetModule
+ # The model class related to this dataset module.
+ attr_reader :model
+
  # Store the model related to this dataset module.
  def initialize(model)
  @model = model
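
With this reader, code evaluated inside a dataset_module block can call model to reach the model class. A small hypothetical sketch:

    class Album < Sequel::Model
      dataset_module do
        # model returns the Album class here, via the new attr_reader
        order :by_pk, *model.primary_key
      end
    end
    Album.by_pk
    # dataset ordered by the primary key, roughly: SELECT * FROM albums ORDER BY id
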

data/lib/sequel/model/exceptions.rb

@@ -24,11 +24,23 @@ module Sequel
  UndefinedAssociation = Class.new(Error)
  ).name

- (
  # Raised when a mass assignment method is called in strict mode with either a restricted column
  # or a column without a setter method.
- MassAssignmentRestriction = Class.new(Error)
- ).name
+ class MassAssignmentRestriction < Error
+ # The Sequel::Model object related to this exception.
+ attr_reader :model
+
+ # The column related to this exception, as a string.
+ attr_reader :column
+
+ # Create an instance of this class with the model and column set.
+ def self.create(msg, model, column)
+ e = new("#{msg} for class #{model.class.inspect}")
+ e.instance_variable_set(:@model, model)
+ e.instance_variable_set(:@column, column)
+ e
+ end
+ end

  # Exception class raised when +raise_on_save_failure+ is set and validation fails
  class ValidationFailed < Error
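
A brief sketch of what the richer exception enables (the Album model is hypothetical; the column reader always returns a string):

    begin
      Album.new.set(id: 1)   # primary key is restricted from mass assignment by default
    rescue Sequel::MassAssignmentRestriction => e
      e.message   # e.g. "id is a restricted primary key for class Album"
      e.model     # the Album instance that rejected the assignment
      e.column    # "id"
    end
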

data/lib/sequel/plugins/column_encryption.rb

@@ -31,7 +31,6 @@ rescue RuntimeError, OpenSSL::Cipher::CipherError
  # :nocov:
  end

- require 'base64'
  require 'securerandom'

  module Sequel
@@ -375,7 +374,7 @@ module Sequel
  # Decrypt using any supported format and any available key.
  def decrypt(data)
  begin
- data = Base64.urlsafe_decode64(data)
+ data = urlsafe_decode64(data)
  rescue ArgumentError
  raise Error, "Unable to decode encrypted column: invalid base64"
  end
@@ -448,7 +447,7 @@ module Sequel
  # The prefix string of columns for the given search type and the first configured encryption key.
  # Used to find values that do not use this prefix in order to perform reencryption.
  def current_key_prefix(search_type)
- Base64.urlsafe_encode64("#{search_type.chr}\0#{@key_id.chr}")
+ urlsafe_encode64("#{search_type.chr}\0#{@key_id.chr}")
  end

  # The prefix values to search for the given data (an array of strings), assuming the column uses
@@ -472,11 +471,33 @@ module Sequel

  private

+ if RUBY_VERSION >= '2.4'
+ def decode64(str)
+ str.unpack1("m0")
+ end
+ # :nocov:
+ else
+ def decode64(str)
+ str.unpack("m0")[0]
+ end
+ # :nocov:
+ end
+
+ def urlsafe_encode64(bin)
+ str = [bin].pack("m0")
+ str.tr!("+/", "-_")
+ str
+ end
+
+ def urlsafe_decode64(str)
+ decode64(str.tr("-_", "+/"))
+ end
+
  # An array of strings, one for each configured encryption key, to find encrypted values matching
  # the given data and search format.
  def _search_prefixes(data, search_type)
  @key_map.map do |key_id, (key, _)|
- Base64.urlsafe_encode64(_search_prefix(data, search_type, key_id, key))
+ urlsafe_encode64(_search_prefix(data, search_type, key_id, key))
  end
  end

@@ -509,7 +530,7 @@ module Sequel
  cipher_text << cipher.update(data) if data_size > 0
  cipher_text << cipher.final

- Base64.urlsafe_encode64("#{prefix}#{random_data}#{cipher_iv}#{cipher.auth_tag}#{cipher_text}")
+ urlsafe_encode64("#{prefix}#{random_data}#{cipher_iv}#{cipher.auth_tag}#{cipher_text}")
  end
  end
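
These private helpers replace the base64 library, which is no longer guaranteed to be available as a default gem on newer Rubies. For illustration only, the hand-rolled URL-safe encoding matches Base64.urlsafe_encode64 with its default padding:

    require 'base64'
    bin = "\x00\xfb\x01data".b
    [bin].pack("m0").tr("+/", "-_") == Base64.urlsafe_encode64(bin)
    # => true ("m0" is strict base64 without newlines; tr swaps in the URL-safe alphabet)
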

data/lib/sequel/plugins/constraint_validations.rb

@@ -125,14 +125,15 @@ module Sequel
  ds = @dataset.with_quote_identifiers(false)
  table_name = ds.literal(ds.first_source_table)
  reflections = {}
- @constraint_validations = (Sequel.synchronize{hash[table_name]} || []).map{|r| constraint_validation_array(r, reflections)}
+ allow_missing_columns = db_schema.select{|col, sch| sch[:allow_null] == false && nil != sch[:default]}.map(&:first)
+ @constraint_validations = (Sequel.synchronize{hash[table_name]} || []).map{|r| constraint_validation_array(r, reflections, allow_missing_columns)}
  @constraint_validation_reflections = reflections
  end
  end

  # Given a specific database constraint validation metadata row hash, transform
  # it in an validation method call array suitable for splatting to send.
- def constraint_validation_array(r, reflections)
+ def constraint_validation_array(r, reflections, allow_missing_columns=EMPTY_ARRAY)
  opts = {}
  opts[:message] = r[:message] if r[:message]
  opts[:allow_nil] = true if db.typecast_value(:boolean, r[:allow_nil])
@@ -191,11 +192,13 @@ module Sequel
  reflection_opts[:argument] = arg
  end

- a << column
- unless opts.empty?
- a << opts
+ opts[:from] = :values
+ if column.is_a?(Symbol) && allow_missing_columns.include?(column)
+ opts[:allow_missing] = true
  end

+ a << column << opts
+
  if column.is_a?(Array) && column.length == 1
  column = column.first
  end

data/lib/sequel/plugins/defaults_setter.rb

@@ -1,5 +1,7 @@
  # frozen-string-literal: true

+ require 'delegate'
+
  module Sequel
  module Plugins
  # The defaults_setter plugin makes the column getter methods return the default
@@ -106,6 +108,20 @@ module Sequel
  lambda{Date.today}
  when Sequel::CURRENT_TIMESTAMP
  lambda{dataset.current_datetime}
+ when Hash, Array
+ v = Marshal.dump(v).freeze
+ lambda{Marshal.load(v)}
+ when Delegator
+ # DelegateClass returns an anonymous class, which cannot be marshalled, so marshal the
+ # underlying object and create a new instance of the class with the unmarshalled object.
+ klass = v.class
+ case o = v.__getobj__
+ when Hash, Array
+ v = Marshal.dump(o).freeze
+ lambda{klass.new(Marshal.load(v))}
+ else
+ v
+ end
  else
  v
  end
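
A hedged sketch of the behavior this enables (model, plugin options, and column are hypothetical): because the default is marshalled once and re-loaded per call, each object gets its own mutable copy instead of all objects sharing one Hash or Array.

    # Assuming: Album.plugin :defaults_setter, cache: true
    # and a column :tags whose parsed database default is an empty array.
    a = Album.new
    a.tags << "rock"      # mutates only this instance's cached copy
    Album.new.tags        # => [] -- a fresh Marshal-loaded copy, not ["rock"]
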

data/lib/sequel/plugins/mssql_optimistic_locking.rb

@@ -26,57 +26,27 @@ module Sequel
  module MssqlOptimisticLocking
  # Load the instance_filters plugin into the model.
  def self.apply(model, opts=OPTS)
- model.plugin :instance_filters
+ model.plugin(:optimistic_locking_base)
  end

- # Set the lock_column to the :lock_column option (default: :timestamp)
+ # Set the lock column
  def self.configure(model, opts=OPTS)
- model.lock_column = opts[:lock_column] || :timestamp
+ model.lock_column = opts[:lock_column] || model.lock_column || :timestamp
  end
-
- module ClassMethods
- # The timestamp/rowversion column containing the version for the current row.
- attr_accessor :lock_column
-
- Plugins.inherited_instance_variables(self, :@lock_column=>nil)
- end
-
+
  module InstanceMethods
- # Add the lock column instance filter to the object before destroying it.
- def before_destroy
- lock_column_instance_filter
- super
- end
-
- # Add the lock column instance filter to the object before updating it.
- def before_update
- lock_column_instance_filter
- super
- end
-
  private

- # Add the lock column instance filter to the object.
- def lock_column_instance_filter
- lc = model.lock_column
- instance_filter(lc=>Sequel.blob(get_column_value(lc)))
- end
-
- # Clear the instance filters when refreshing, so that attempting to
- # refresh after a failed save removes the previous lock column filter
- # (the new one will be added before updating).
- def _refresh(ds)
- clear_instance_filters
- super
+ # Make the instance filter value a blob.
+ def lock_column_instance_filter_value
+ Sequel.blob(super)
  end

  # Remove the lock column from the columns to update.
  # SQL Server automatically updates the lock column value, and does not like
  # it to be assigned.
  def _save_update_all_columns_hash
- v = @values.dup
- cc = changed_columns
- Array(primary_key).each{|x| v.delete(x) unless cc.include?(x)}
+ v = super
  v.delete(model.lock_column)
  v
  end