sequel 5.85.0 → 5.93.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. checksums.yaml +4 -4
  2. data/lib/sequel/adapters/ado.rb +1 -1
  3. data/lib/sequel/adapters/ibmdb.rb +1 -0
  4. data/lib/sequel/adapters/jdbc/db2.rb +2 -2
  5. data/lib/sequel/adapters/jdbc/derby.rb +2 -2
  6. data/lib/sequel/adapters/jdbc/h2.rb +2 -2
  7. data/lib/sequel/adapters/jdbc/hsqldb.rb +2 -2
  8. data/lib/sequel/adapters/jdbc/jtds.rb +2 -2
  9. data/lib/sequel/adapters/jdbc/mysql.rb +1 -1
  10. data/lib/sequel/adapters/jdbc/oracle.rb +5 -5
  11. data/lib/sequel/adapters/jdbc/postgresql.rb +5 -5
  12. data/lib/sequel/adapters/jdbc/sqlanywhere.rb +6 -6
  13. data/lib/sequel/adapters/jdbc/sqlite.rb +2 -2
  14. data/lib/sequel/adapters/jdbc/sqlserver.rb +2 -2
  15. data/lib/sequel/adapters/jdbc.rb +8 -8
  16. data/lib/sequel/adapters/mysql2.rb +8 -1
  17. data/lib/sequel/adapters/oracle.rb +16 -0
  18. data/lib/sequel/adapters/shared/access.rb +1 -0
  19. data/lib/sequel/adapters/shared/mssql.rb +4 -3
  20. data/lib/sequel/adapters/shared/mysql.rb +8 -4
  21. data/lib/sequel/adapters/shared/oracle.rb +1 -0
  22. data/lib/sequel/adapters/shared/postgres.rb +140 -9
  23. data/lib/sequel/adapters/sqlite.rb +4 -0
  24. data/lib/sequel/adapters/trilogy.rb +1 -2
  25. data/lib/sequel/core.rb +15 -0
  26. data/lib/sequel/database/dataset_defaults.rb +3 -3
  27. data/lib/sequel/database/misc.rb +17 -4
  28. data/lib/sequel/database/query.rb +11 -11
  29. data/lib/sequel/database/schema_generator.rb +8 -0
  30. data/lib/sequel/dataset/deprecated_singleton_class_methods.rb +1 -1
  31. data/lib/sequel/dataset/prepared_statements.rb +70 -25
  32. data/lib/sequel/dataset/query.rb +9 -5
  33. data/lib/sequel/dataset/sql.rb +19 -9
  34. data/lib/sequel/extensions/connection_validator.rb +15 -10
  35. data/lib/sequel/extensions/migration.rb +23 -3
  36. data/lib/sequel/extensions/null_dataset.rb +2 -2
  37. data/lib/sequel/extensions/pg_auto_parameterize.rb +6 -1
  38. data/lib/sequel/extensions/pg_auto_parameterize_in_array.rb +93 -10
  39. data/lib/sequel/extensions/pg_enum.rb +3 -3
  40. data/lib/sequel/extensions/pg_row.rb +3 -1
  41. data/lib/sequel/extensions/pg_schema_caching.rb +90 -0
  42. data/lib/sequel/extensions/query_blocker.rb +172 -0
  43. data/lib/sequel/extensions/schema_caching.rb +24 -9
  44. data/lib/sequel/extensions/schema_dumper.rb +16 -4
  45. data/lib/sequel/extensions/sqlite_json_ops.rb +1 -1
  46. data/lib/sequel/extensions/string_agg.rb +2 -2
  47. data/lib/sequel/extensions/virtual_row_method_block.rb +1 -0
  48. data/lib/sequel/model/associations.rb +28 -3
  49. data/lib/sequel/model/base.rb +67 -18
  50. data/lib/sequel/plugins/composition.rb +1 -1
  51. data/lib/sequel/plugins/enum.rb +1 -1
  52. data/lib/sequel/plugins/forbid_lazy_load.rb +14 -1
  53. data/lib/sequel/plugins/inspect_pk.rb +44 -0
  54. data/lib/sequel/plugins/instance_filters.rb +4 -1
  55. data/lib/sequel/plugins/inverted_subsets.rb +1 -0
  56. data/lib/sequel/plugins/lazy_attributes.rb +1 -1
  57. data/lib/sequel/plugins/nested_attributes.rb +10 -5
  58. data/lib/sequel/plugins/paged_operations.rb +5 -2
  59. data/lib/sequel/plugins/pg_auto_constraint_validations.rb +6 -1
  60. data/lib/sequel/plugins/pg_auto_validate_enums.rb +88 -0
  61. data/lib/sequel/plugins/pg_eager_any_typed_array.rb +95 -0
  62. data/lib/sequel/plugins/rcte_tree.rb +1 -1
  63. data/lib/sequel/plugins/serialization.rb +11 -5
  64. data/lib/sequel/plugins/sql_comments.rb +7 -2
  65. data/lib/sequel/plugins/static_cache_cache.rb +50 -13
  66. data/lib/sequel/plugins/subset_conditions.rb +85 -5
  67. data/lib/sequel/plugins/subset_static_cache.rb +263 -0
  68. data/lib/sequel/sql.rb +15 -6
  69. data/lib/sequel/version.rb +1 -1
  70. metadata +9 -6
data/lib/sequel/dataset/sql.rb
@@ -685,10 +685,10 @@ module Sequel
  # being quoted, returns name as a string. If identifiers are being quoted
  # quote the name with quoted_identifier.
  def quote_identifier_append(sql, name)
+ name = name.value if name.is_a?(SQL::Identifier)
  if name.is_a?(LiteralString)
  sql << name
  else
- name = name.value if name.is_a?(SQL::Identifier)
  name = input_identifier(name)
  if quote_identifiers?
  quoted_identifier_append(sql, name)
@@ -700,11 +700,14 @@ module Sequel
 
  # Append literalization of identifier or unqualified identifier to SQL string.
  def quote_schema_table_append(sql, table)
- schema, table = schema_and_table(table)
- if schema
- quote_identifier_append(sql, schema)
+ qualifiers = split_qualifiers(table)
+ table = qualifiers.pop
+
+ qualifiers.each do |q|
+ quote_identifier_append(sql, q)
  sql << '.'
  end
+
  quote_identifier_append(sql, table)
  end
 
@@ -1032,7 +1035,7 @@ module Sequel
  if column_aliases
  raise Error, "#{db.database_type} does not support derived column lists" unless supports_derived_column_lists?
  sql << '('
- identifier_list_append(sql, column_aliases)
+ derived_column_list_sql_append(sql, column_aliases)
  sql << ')'
  end
  end
@@ -1165,6 +1168,11 @@ module Sequel
  end
  end
 
+ # Append the column aliases to the SQL.
+ def derived_column_list_sql_append(sql, column_aliases)
+ identifier_list_append(sql, column_aliases)
+ end
+
  # Disable caching of SQL for the current dataset
  def disable_sql_caching!
  cache_set(:_no_cache_sql, true)
@@ -1425,10 +1433,6 @@ module Sequel
  # calls +sql_literal+ if object responds to it, otherwise raises an error.
  # If a database specific type is allowed, this should be overriden in a subclass.
  def literal_other_append(sql, v)
- # We can't be sure if v will always literalize to the same SQL, so
- # don't cache SQL for a dataset that uses this.
- disable_sql_caching!
-
  if v.respond_to?(:sql_literal_append)
  v.sql_literal_append(self, sql)
  elsif v.respond_to?(:sql_literal)
@@ -1436,6 +1440,12 @@ module Sequel
  else
  raise Error, "can't express #{v.inspect} as a SQL literal"
  end
+
+ if !v.respond_to?(:sql_literal_allow_caching?) || !v.sql_literal_allow_caching?(self)
+ # We can't be sure if v will always literalize to the same SQL, so
+ # don't cache SQL for a dataset that uses this.
+ disable_sql_caching!
+ end
  end
 
  # SQL fragment for Sequel::SQLTime, containing just the time part
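
The hunk above adds an opt-in protocol: an object with custom literalization can define sql_literal_allow_caching? to keep dataset SQL caching enabled. A minimal sketch; the InfinityLiteral class is hypothetical, only the two-method protocol comes from the diff:

  class InfinityLiteral
    # Append a constant SQL fragment for this object.
    def sql_literal_append(ds, sql)
      sql << "'infinity'"
    end

    # The literalization never varies, so datasets using this object
    # may keep caching their generated SQL.
    def sql_literal_allow_caching?(ds)
      true
    end
  end

  DB.literal(InfinityLiteral.new)   # => "'infinity'"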
data/lib/sequel/extensions/connection_validator.rb
@@ -105,18 +105,23 @@ module Sequel
  1.times do
  if (conn = super) &&
  (timer = sync{@connection_timestamps.delete(conn)}) &&
- Sequel.elapsed_seconds_since(timer) > @connection_validation_timeout &&
- !db.valid_connection?(conn)
+ Sequel.elapsed_seconds_since(timer) > @connection_validation_timeout
 
- case pool_type
- when :sharded_threaded, :sharded_timed_queue
- sync{@allocated[a.last].delete(Sequel.current)}
- else
- sync{@allocated.delete(Sequel.current)}
- end
+ begin
+ valid = db.valid_connection?(conn)
+ ensure
+ unless valid
+ case pool_type
+ when :sharded_threaded, :sharded_timed_queue
+ sync{@allocated[a.last].delete(Sequel.current)}
+ else
+ sync{@allocated.delete(Sequel.current)}
+ end
 
- disconnect_connection(conn)
- redo
+ disconnect_connection(conn)
+ redo if valid == false
+ end
+ end
  end
  end
 
data/lib/sequel/extensions/migration.rb
@@ -223,7 +223,7 @@ module Sequel
  @actions << [:drop_join_table, *args]
  end
 
- def create_table(name, opts=OPTS)
+ def create_table(name, opts=OPTS, &_)
  @actions << [:drop_table, name, opts]
  end
 
@@ -287,6 +287,10 @@ module Sequel
  def set_column_allow_null(name, allow_null=true)
  @actions << [:set_column_allow_null, name, !allow_null]
  end
+
+ def set_column_not_null(name)
+ @actions << [:set_column_allow_null, name]
+ end
  end
 
  # The preferred method for writing Sequel migrations, using a DSL:
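
The new reverser above makes set_column_not_null usable inside reversible (change) migrations; migrating down replays it as set_column_allow_null. A hedged sketch, with table and column names that are only illustrative:

  Sequel.migration do
    change do
      alter_table(:items) do
        # Reversed to set_column_allow_null(:name) when migrating down.
        set_column_not_null :name
      end
    end
  end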
@@ -371,7 +375,7 @@ module Sequel
  #
  # Part of the +migration+ extension.
  class Migrator
- MIGRATION_FILE_PATTERN = /\A(\d+)_.+\.rb\z/i.freeze
+ MIGRATION_FILE_PATTERN = /\A(\d+)_(.+)\.rb\z/i.freeze
 
  # Mutex used around migration file loading
  MUTEX = Mutex.new
@@ -791,7 +795,23 @@ module Sequel
  next unless MIGRATION_FILE_PATTERN.match(file)
  files << File.join(directory, file)
  end
- files.sort_by{|f| MIGRATION_FILE_PATTERN.match(File.basename(f))[1].to_i}
+ files.sort! do |a, b|
+ a_ver, a_name = split_migration_filename(a)
+ b_ver, b_name = split_migration_filename(b)
+ x = a_ver <=> b_ver
+ if x.zero?
+ x = a_name <=> b_name
+ end
+ x
+ end
+ files
+ end
+
+ # Return an integer and name (without extension) for the given path.
+ def split_migration_filename(path)
+ version, name = MIGRATION_FILE_PATTERN.match(File.basename(path)).captures
+ version = version.to_i
+ [version, name]
  end
 
  # Returns tuples of migration, filename, and direction
data/lib/sequel/extensions/null_dataset.rb
@@ -63,12 +63,12 @@ module Sequel
  end
 
  # Return self without sending a database query, never yielding.
- def each
+ def each(&_)
  self
  end
 
  # Return nil without sending a database query, never yielding.
- def fetch_rows(sql)
+ def fetch_rows(sql, &_)
  nil
  end
 
data/lib/sequel/extensions/pg_auto_parameterize.rb
@@ -394,7 +394,7 @@ module Sequel
  # there can be more than one parameter per column, so this doesn't prevent going
  # over the limit, though it does make it less likely.
  def default_import_slice
- 40
+ @opts[:no_auto_parameterize] ? super : 40
  end
 
  # Handle parameterization of multi_insert_sql
@@ -463,6 +463,11 @@ module Sequel
  @opts[:no_auto_parameterize] ? super : QueryString.new
  end
 
+ # A mutable string used as the prefix when explaining a query.
+ def explain_sql_string_origin(opts)
+ @opts[:no_auto_parameterize] ? super : (QueryString.new << super)
+ end
+
  # If subquery uses with_sql with a method name symbol, get the dataset
  # with_sql was called on, and use that as the subquery, recording the
  # arguments to with_sql that will be used to calculate the sql.
data/lib/sequel/extensions/pg_auto_parameterize_in_array.rb
@@ -21,15 +21,26 @@
  # DateTime :: timestamp (or timestamptz if pg_timestamptz extension is used)
  # Sequel::SQLTime :: time
  # Sequel::SQL::Blob :: bytea
+ #
+ # Arrays of string values are not automatically converted by default, because the Ruby
+ # String class can represent a number of different database types. To convert
+ # arrays of Ruby strings to an untyped array (a query parameter with no explicit
+ # type cast), set the +:treat_string_list_as_untyped_array+ Database option
+ # before loading the extension.
  #
- # String values are also supported using the +text+ type, but only if the
- # +:treat_string_list_as_text_array+ Database option is used. This is because
- # treating strings as text can break programs, since the type for
- # literal strings in PostgreSQL is +unknown+, not +text+.
+ # If you will only be using arrays of Ruby strings that represent the +text+ type,
+ # you can use the +:treat_string_list_as_text_array+ Database option. This
+ # can break programs, since the type for literal strings in PostgreSQL is +unknown+,
+ # not +text+.
  #
- # The conversion is only done for single dimensional arrays that have more
- # than two elements, where all elements are of the same class (other than
- # nil values).
+ # The conversion is only done for single dimensional arrays that have two or
+ # more elements, where all elements are of the same class (other than
+ # +nil+ values). You can also do the conversion for arrays of 1 element by setting
+ # the <tt>pg_auto_parameterize_min_array_size: 1</tt> Database option. This makes
+ # finding cases that need special handling easier, but it doesn't match
+ # how PostgreSQL internally converts the expression (PostgreSQL converts
+ # <tt>IN (single_value)</tt> to <tt>= single_value</tt>, not
+ # <tt>= ANY(ARRAY[single_value])</tt>).
  #
  # Related module: Sequel::Postgres::AutoParameterizeInArray
 
46
 
@@ -37,6 +48,47 @@ module Sequel
  module Postgres
  # Enable automatically parameterizing queries.
  module AutoParameterizeInArray
+ module TreatStringListAsUntypedArray
+ # Sentinal value to use as an auto param type to use auto parameterization
+ # of a string array without an explicit type cast.
+ NO_EXPLICIT_CAST = Object.new.freeze
+
+ # Wrapper for untyped PGArray values that will be parameterized directly
+ # into the query. This should only be used in cases where you know the
+ # value should be added as a query parameter.
+ class ParameterizedUntypedPGArray < SQL::Wrapper
+ def to_s_append(ds, sql)
+ sql.add_arg(@value)
+ end
+ end
+
+ private
+
+ # Recognize NO_EXPLICIT_CAST sentinal value and use wrapped
+ # PGArray that will be parameterized into the query.
+ def _convert_array_to_pg_array_with_type(r, type)
+ if NO_EXPLICIT_CAST.equal?(type)
+ ParameterizedUntypedPGArray.new(Sequel.pg_array(r))
+ else
+ super
+ end
+ end
+
+ # Use a query parameter with no type cast for string arrays.
+ def _bound_variable_type_for_string_array(r)
+ NO_EXPLICIT_CAST
+ end
+ end
+
+ module TreatStringListAsTextArray
+ private
+
+ # Assume all string arrays used on RHS of IN/NOT IN are for type text[]
+ def _bound_variable_type_for_string_array(r)
+ "text"
+ end
+ end
+
  # Transform column IN (...) expressions into column = ANY($)
  # and column NOT IN (...) expressions into column != ALL($)
  # using an array bound variable for the ANY/ALL argument,
@@ -56,7 +108,7 @@ module Sequel
  op = :!=
  func = :ALL
  end
- args = [l, Sequel.function(func, Sequel.pg_array(r, type))]
+ args = [l, Sequel.function(func, _convert_array_to_pg_array_with_type(r, type))]
  end
  end
 
@@ -68,7 +120,7 @@ module Sequel
  # The bound variable type string to use for the bound variable array.
  # Returns nil if a bound variable should not be used for the array.
  def _bound_variable_type_for_array(r)
- return unless Array === r && r.size > 1
+ return unless Array === r && r.size >= pg_auto_parameterize_min_array_size
  classes = r.map(&:class)
  classes.uniq!
  classes.delete(NilClass)
@@ -81,7 +133,7 @@ module Sequel
  # arrays natively (though the SQL used is different)
  "int8"
  elsif klass == String
- "text" if db.typecast_value(:boolean, db.opts[:treat_string_list_as_text_array])
+ _bound_variable_type_for_string_array(r)
  elsif klass == BigDecimal
  "numeric"
  elsif klass == Date
@@ -100,11 +152,42 @@ module Sequel
  "bytea"
  end
  end
+
+ # Do not auto parameterize string arrays by default.
+ def _bound_variable_type_for_string_array(r)
+ nil
+ end
+
+ # The minimium size of array to auto parameterize.
+ def pg_auto_parameterize_min_array_size
+ 2
+ end
+
+ # Convert RHS of IN/NOT IN operator to PGArray with given type.
+ def _convert_array_to_pg_array_with_type(r, type)
+ Sequel.pg_array(r, type)
+ end
  end
  end
 
  Database.register_extension(:pg_auto_parameterize_in_array) do |db|
  db.extension(:pg_array, :pg_auto_parameterize)
  db.extend_datasets(Postgres::AutoParameterizeInArray)
+
+ if db.typecast_value(:boolean, db.opts[:treat_string_list_as_text_array])
+ db.extend_datasets(Postgres::AutoParameterizeInArray::TreatStringListAsTextArray)
+ elsif db.typecast_value(:boolean, db.opts[:treat_string_list_as_untyped_array])
+ db.extend_datasets(Postgres::AutoParameterizeInArray::TreatStringListAsUntypedArray)
+ end
+
+ if min_array_size = db.opts[:pg_auto_parameterize_min_array_size]
+ min_array_size = db.typecast_value(:integer, min_array_size)
+ mod = Module.new do
+ define_method(:pg_auto_parameterize_min_array_size){min_array_size}
+ private :pg_auto_parameterize_min_array_size
+ end
+ Sequel.set_temp_name(mod){"Sequel::Postgres::AutoParameterizeInArray::_MinArraySize#{min_array_size}"}
+ db.extend_datasets(mod)
+ end
  end
  end
data/lib/sequel/extensions/pg_enum.rb
@@ -149,12 +149,12 @@ module Sequel
  from(:pg_type).
  where(:oid=>enum_labels.keys).
  exclude(:typarray=>0).
- select_map([:typname, Sequel.cast(:typarray, Integer).as(:v)])
+ select_map([:typname, Sequel.cast(:typarray, Integer).as(:v), Sequel.cast(:oid, Integer).as(:sv)])
 
  existing_oids = conversion_procs.keys
- array_types.each do |name, oid|
+ array_types.each do |name, oid, scalar_oid|
  next if existing_oids.include?(oid)
- register_array_type(name, :oid=>oid)
+ register_array_type(name, :oid=>oid, :scalar_oid=>scalar_oid)
  end
  end
 
data/lib/sequel/extensions/pg_row.rb
@@ -113,6 +113,7 @@ module Sequel
  # automatically casted to the database type when literalizing.
  def self.subclass(db_type)
  Class.new(self) do
+ Sequel.set_temp_name(self){"Sequel::Postgres::PGRow::ArrayRow::_Subclass(#{db_type})"}
  @db_type = db_type
  end
  end
@@ -170,6 +171,7 @@ module Sequel
  # type and columns.
  def self.subclass(db_type, columns)
  Class.new(self) do
+ Sequel.set_temp_name(self){"Sequel::Postgres::PGRow::HashRow::_Subclass(#{db_type})"}
  @db_type = db_type
  @columns = columns
  end
@@ -391,7 +393,7 @@ module Sequel
  db.instance_exec do
  @row_types = {}
  @row_schema_types = {}
- extend(@row_type_method_module = Module.new)
+ extend(@row_type_method_module = Sequel.set_temp_name(Module.new){"Sequel::Postgres::PGRow::DatabaseMethods::_RowTypeMethodModule"})
  add_conversion_proc(2249, PGRow::Parser.new(:converter=>PGRow::ArrayRow))
  if respond_to?(:register_array_type)
  register_array_type('record', :oid=>2287, :scalar_oid=>2249)
data/lib/sequel/extensions/pg_schema_caching.rb
@@ -0,0 +1,90 @@
+ # frozen-string-literal: true
+ #
+ # The pg_schema_caching extension builds on top of the schema_caching
+ # extension, and allows it to handle custom PostgreSQL types. On
+ # PostgreSQL, column schema hashes include an :oid entry for the OID
+ # for the column's type. For custom types, this OID is dependent on
+ # the PostgreSQL database, so in most cases, test and development
+ # versions of the same database, created with the same migrations,
+ # will have different OIDs.
+ #
+ # To fix this case, the pg_schema_caching extension removes custom
+ # OIDs from the schema cache when dumping the schema, replacing them
+ # with a placeholder. When loading the cached schema, the Database
+ # object makes a single query to get the OIDs for all custom types
+ # used by the cached schema, and it updates all related column
+ # schema hashes to set the correct :oid entry for the current
+ # database.
+ #
+ # Related module: Sequel::Postgres::SchemaCaching
+
+ require_relative "schema_caching"
+
+ module Sequel
+ module Postgres
+ module SchemaCaching
+ include Sequel::SchemaCaching
+
+ private
+
+ # Load custom oids from database when loading schema cache file.
+ def load_schema_cache_file(file)
+ set_custom_oids_for_cached_schema(super)
+ end
+
+ # Find all column schema hashes that use custom types.
+ # Load the oids for custom types in a single query, and update
+ # each related column schema hash with the correct oid.
+ def set_custom_oids_for_cached_schema(schemas)
+ custom_oid_rows = {}
+
+ schemas.each_value do |cols|
+ cols.each do |_, h|
+ if h[:oid] == :custom
+ (custom_oid_rows[h[:db_type]] ||= []) << h
+ end
+ end
+ end
+
+ unless custom_oid_rows.empty?
+ from(:pg_type).where(:typname=>custom_oid_rows.keys).select_hash(:typname, :oid).each do |name, oid|
+ custom_oid_rows.delete(name).each do |row|
+ row[:oid] = oid
+ end
+ end
+ end
+
+ unless custom_oid_rows.empty?
+ warn "Could not load OIDs for the following custom types: #{custom_oid_rows.keys.sort.join(", ")}", uplevel: 3
+
+ schemas.keys.each do |k|
+ if schemas[k].any?{|_,h| h[:oid] == :custom}
+ # Remove schema entry for table, so it will be queried at runtime to get the correct oids
+ schemas.delete(k)
+ end
+ end
+ end
+
+ schemas
+ end
+
+ # Replace :oid entries for custom types with :custom.
+ def dumpable_schema_cache
+ sch = super
+
+ sch.each_value do |cols|
+ cols.each do |_, h|
+ if (oid = h[:oid]) && oid >= 10000
+ h[:oid] = :custom
+ end
+ end
+ end
+
+ sch
+ end
+ end
+ end
+
+ Database.register_extension(:pg_schema_caching, Postgres::SchemaCaching)
+ end
+
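
A brief usage sketch for the new extension, assuming the dump/load methods provided by the schema_caching extension it builds on; the cache path is a placeholder:

  DB.extension :pg_schema_caching

  # After migrating (e.g. in a rake task): custom type OIDs are dumped
  # as :custom placeholders instead of database-specific numbers.
  DB.dump_schema_cache('db/schema.cache')

  # At application boot: the placeholders are resolved with a single
  # pg_type query against the current database.
  DB.load_schema_cache('db/schema.cache')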
data/lib/sequel/extensions/query_blocker.rb
@@ -0,0 +1,172 @@
+ # frozen-string-literal: true
+ #
+ # The query_blocker extension adds Database#block_queries.
+ # Inside the block passed to #block_queries, any attempts to
+ # execute a query/statement on the database will raise a
+ # Sequel::QueryBlocker::BlockedQuery exception.
+ #
+ # DB.extension :query_blocker
+ # DB.block_queries do
+ # ds = DB[:table] # No exception
+ # ds = ds.where(column: 1) # No exception
+ # ds.all # Exception raised
+ # end
+ #
+ # To handle concurrency, you can pass a :scope option:
+ #
+ # # Current Thread
+ # DB.block_queries(scope: :thread){}
+ #
+ # # Current Fiber
+ # DB.block_queries(scope: :fiber){}
+ #
+ # # Specific Thread
+ # DB.block_queries(scope: Thread.current){}
+ #
+ # # Specific Fiber
+ # DB.block_queries(scope: Fiber.current){}
+ #
+ # Database#block_queries is useful for blocking queries inside
+ # the block. However, there may be cases where you want to
+ # allow queries in specific places inside a block_queries block.
+ # You can use Database#allow_queries for that:
+ #
+ # DB.block_queries do
+ # DB.allow_queries do
+ # DB[:table].all # Query allowed
+ # end
+ #
+ # DB[:table].all # Exception raised
+ # end
+ #
+ # When mixing block_queries and allow_queries with scopes, the
+ # narrowest scope has priority. So if you are blocking with
+ # :thread scope, and allowing with :fiber scope, queries in the
+ # current fiber will be allowed, but queries in different fibers of
+ # the current thread will be blocked.
+ #
+ # Note that this should catch all queries executed through the
+ # Database instance. Whether it catches queries executed directly
+ # on a connection object depends on the adapter in use.
+ #
+ # Related module: Sequel::QueryBlocker
+
+ require "fiber"
+
+ #
+ module Sequel
+ module QueryBlocker
+ # Exception class raised if there is an attempt to execute a
+ # query/statement on the database inside a block passed to
+ # block_queries.
+ class BlockedQuery < Sequel::Error
+ end
+
+ def self.extended(db)
+ db.instance_exec do
+ @blocked_query_scopes ||= {}
+ end
+ end
+
+ # If checking a connection for validity, and a BlockedQuery exception is
+ # raised, treat it as a valid connection. You cannot check whether the
+ # connection is valid without issuing a query, and if queries are blocked,
+ # you need to assume it is valid or assume it is not. Since it most cases
+ # it will be valid, this assumes validity.
+ def valid_connection?(conn)
+ super
+ rescue BlockedQuery
+ true
+ end
+
+ # Check whether queries are blocked before executing them.
+ def log_connection_yield(sql, conn, args=nil)
+ # All database adapters should be calling this method around
+ # query execution (otherwise the queries would not get logged),
+ # ensuring the blocking is checked. Any database adapter issuing
+ # a query without calling this method is considered buggy.
+ check_blocked_queries!
+ super
+ end
+
+ # Whether queries are currently blocked.
+ def block_queries?
+ b = @blocked_query_scopes
+ b.fetch(Fiber.current) do
+ b.fetch(Thread.current) do
+ b.fetch(:global, false)
+ end
+ end
+ end
+
+ # Allow queries inside the block. Only useful if they are already blocked
+ # for the same scope. Useful for blocking queries generally, and only allowing
+ # them in specific places. Takes the same :scope option as #block_queries.
+ def allow_queries(opts=OPTS, &block)
+ _allow_or_block_queries(false, opts, &block)
+ end
+
+ # Reject (raise an BlockedQuery exception) if there is an attempt to execute
+ # a query/statement inside the block.
+ #
+ # The :scope option indicates which queries are rejected inside the block:
+ #
+ # :global :: This is the default, and rejects all queries.
+ # :thread :: Reject all queries in the current thread.
+ # :fiber :: Reject all queries in the current fiber.
+ # Thread :: Reject all queries in the given thread.
+ # Fiber :: Reject all queries in the given fiber.
+ def block_queries(opts=OPTS, &block)
+ _allow_or_block_queries(true, opts, &block)
+ end
+
+ private
+
+ # Internals of block_queries and allow_queries.
+ def _allow_or_block_queries(value, opts)
+ scope = query_blocker_scope(opts)
+ prev_value = nil
+ scopes = @blocked_query_scopes
+
+ begin
+ Sequel.synchronize do
+ prev_value = scopes[scope]
+ scopes[scope] = value
+ end
+
+ yield
+ ensure
+ Sequel.synchronize do
+ if prev_value.nil?
+ scopes.delete(scope)
+ else
+ scopes[scope] = prev_value
+ end
+ end
+ end
+ end
+
+ # The scope for the query block, either :global, or a Thread or Fiber instance.
+ def query_blocker_scope(opts)
+ case scope = opts[:scope]
+ when nil
+ :global
+ when :global, Thread, Fiber
+ scope
+ when :thread
+ Thread.current
+ when :fiber
+ Fiber.current
+ else
+ raise Sequel::Error, "invalid scope given to block_queries: #{scope.inspect}"
+ end
+ end
+
+ # Raise a BlockQuery exception if queries are currently blocked.
+ def check_blocked_queries!
+ raise BlockedQuery, "cannot execute query inside a block_queries block" if block_queries?
+ end
+ end
+
+ Database.register_extension(:query_blocker, QueryBlocker)
+ end
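
A hedged sketch of the scope-priority rule described in the header comments of the new extension above; DB and the :items table are placeholders:

  DB.extension :query_blocker

  DB.block_queries(scope: :thread) do
    DB.allow_queries(scope: :fiber) do
      DB[:items].count   # allowed: the current fiber's setting wins
    end

    DB[:items].count     # raises Sequel::QueryBlocker::BlockedQuery
  end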