sequel 5.19.0 → 5.24.0

Files changed (91)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG +102 -0
  3. data/doc/dataset_filtering.rdoc +15 -0
  4. data/doc/opening_databases.rdoc +5 -1
  5. data/doc/release_notes/5.20.0.txt +89 -0
  6. data/doc/release_notes/5.21.0.txt +87 -0
  7. data/doc/release_notes/5.22.0.txt +48 -0
  8. data/doc/release_notes/5.23.0.txt +56 -0
  9. data/doc/release_notes/5.24.0.txt +56 -0
  10. data/doc/sharding.rdoc +2 -0
  11. data/doc/testing.rdoc +1 -0
  12. data/doc/transactions.rdoc +38 -0
  13. data/lib/sequel/adapters/ado.rb +27 -19
  14. data/lib/sequel/adapters/jdbc.rb +7 -1
  15. data/lib/sequel/adapters/jdbc/mysql.rb +2 -2
  16. data/lib/sequel/adapters/jdbc/postgresql.rb +1 -13
  17. data/lib/sequel/adapters/jdbc/sqlite.rb +29 -0
  18. data/lib/sequel/adapters/mysql2.rb +2 -3
  19. data/lib/sequel/adapters/shared/mssql.rb +7 -7
  20. data/lib/sequel/adapters/shared/postgres.rb +37 -19
  21. data/lib/sequel/adapters/shared/sqlite.rb +27 -3
  22. data/lib/sequel/adapters/sqlite.rb +1 -1
  23. data/lib/sequel/adapters/tinytds.rb +12 -0
  24. data/lib/sequel/adapters/utils/mysql_mysql2.rb +2 -0
  25. data/lib/sequel/database/logging.rb +7 -1
  26. data/lib/sequel/database/query.rb +1 -1
  27. data/lib/sequel/database/schema_generator.rb +12 -3
  28. data/lib/sequel/database/schema_methods.rb +2 -0
  29. data/lib/sequel/database/transactions.rb +57 -5
  30. data/lib/sequel/dataset.rb +4 -2
  31. data/lib/sequel/dataset/actions.rb +3 -2
  32. data/lib/sequel/dataset/placeholder_literalizer.rb +4 -1
  33. data/lib/sequel/dataset/query.rb +5 -1
  34. data/lib/sequel/dataset/sql.rb +11 -7
  35. data/lib/sequel/extensions/named_timezones.rb +52 -8
  36. data/lib/sequel/extensions/pg_array.rb +4 -0
  37. data/lib/sequel/extensions/pg_json.rb +387 -123
  38. data/lib/sequel/extensions/pg_range.rb +3 -2
  39. data/lib/sequel/extensions/pg_row.rb +3 -1
  40. data/lib/sequel/extensions/schema_dumper.rb +1 -1
  41. data/lib/sequel/extensions/server_block.rb +15 -4
  42. data/lib/sequel/model/associations.rb +35 -9
  43. data/lib/sequel/model/plugins.rb +104 -0
  44. data/lib/sequel/plugins/association_dependencies.rb +3 -3
  45. data/lib/sequel/plugins/association_pks.rb +14 -4
  46. data/lib/sequel/plugins/association_proxies.rb +3 -2
  47. data/lib/sequel/plugins/class_table_inheritance.rb +11 -0
  48. data/lib/sequel/plugins/composition.rb +13 -9
  49. data/lib/sequel/plugins/finder.rb +2 -2
  50. data/lib/sequel/plugins/hook_class_methods.rb +17 -5
  51. data/lib/sequel/plugins/insert_conflict.rb +72 -0
  52. data/lib/sequel/plugins/inverted_subsets.rb +2 -2
  53. data/lib/sequel/plugins/pg_auto_constraint_validations.rb +147 -59
  54. data/lib/sequel/plugins/rcte_tree.rb +6 -0
  55. data/lib/sequel/plugins/static_cache.rb +8 -3
  56. data/lib/sequel/plugins/static_cache_cache.rb +53 -0
  57. data/lib/sequel/plugins/subset_conditions.rb +2 -2
  58. data/lib/sequel/plugins/validation_class_methods.rb +5 -3
  59. data/lib/sequel/sql.rb +15 -3
  60. data/lib/sequel/timezones.rb +50 -11
  61. data/lib/sequel/version.rb +1 -1
  62. data/spec/adapters/mssql_spec.rb +24 -0
  63. data/spec/adapters/mysql_spec.rb +0 -5
  64. data/spec/adapters/postgres_spec.rb +319 -1
  65. data/spec/bin_spec.rb +1 -1
  66. data/spec/core/database_spec.rb +123 -2
  67. data/spec/core/dataset_spec.rb +33 -1
  68. data/spec/core/expression_filters_spec.rb +25 -1
  69. data/spec/core/schema_spec.rb +24 -0
  70. data/spec/extensions/class_table_inheritance_spec.rb +30 -8
  71. data/spec/extensions/core_refinements_spec.rb +1 -1
  72. data/spec/extensions/hook_class_methods_spec.rb +22 -0
  73. data/spec/extensions/insert_conflict_spec.rb +103 -0
  74. data/spec/extensions/migration_spec.rb +13 -0
  75. data/spec/extensions/named_timezones_spec.rb +109 -2
  76. data/spec/extensions/pg_auto_constraint_validations_spec.rb +45 -0
  77. data/spec/extensions/pg_json_spec.rb +218 -29
  78. data/spec/extensions/pg_range_spec.rb +76 -9
  79. data/spec/extensions/rcte_tree_spec.rb +6 -0
  80. data/spec/extensions/s_spec.rb +1 -1
  81. data/spec/extensions/schema_dumper_spec.rb +4 -2
  82. data/spec/extensions/server_block_spec.rb +38 -0
  83. data/spec/extensions/spec_helper.rb +8 -1
  84. data/spec/extensions/static_cache_cache_spec.rb +35 -0
  85. data/spec/integration/dataset_test.rb +25 -9
  86. data/spec/integration/plugin_test.rb +42 -0
  87. data/spec/integration/schema_test.rb +7 -2
  88. data/spec/integration/transaction_test.rb +50 -0
  89. data/spec/model/associations_spec.rb +84 -4
  90. data/spec/model/plugins_spec.rb +111 -0
  91. metadata +16 -2
@@ -140,6 +140,8 @@ the shard to use. This is fairly easy using a Sequel::Model:
 
   Rainbow.plaintext_for_hash("e580726d31f6e1ad216ffd87279e536d1f74e606")
 
+=== :servers_hash Option
+
 The connection pool can be further controlled to change how it handles attempts
 to access shards that haven't been configured. The default is
 to assume the :default shard. However, you can specify a
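The hunk is truncated here, so the body of the new :servers_hash section is not shown. As a rough sketch of the kind of usage such an option documents (the shard names and error message below are invented, not taken from the diff), the option can be given a hash whose default proc decides what happens for unconfigured shards:

  DB = Sequel.connect('postgres://localhost/blog',
    servers: {read_only: {host: 'replica1'}},
    servers_hash: Hash.new{|h, k| raise Sequel::Error, "unknown shard: #{k.inspect}"})

  DB[:posts].server(:read_only).all  # uses the configured :read_only shard
  DB[:posts].server(:archive).all    # raises Sequel::Error instead of silently using :default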
@@ -169,3 +169,4 @@ SEQUEL_NO_PENDING :: Don't skip any specs, try running all specs (note, can caus
 SEQUEL_PG_TIMESTAMPTZ :: Use the pg_timestamptz extension when running the postgres specs
 SEQUEL_SPLIT_SYMBOLS :: Turn on symbol splitting when running the adapter and integration specs
 SEQUEL_SYNCHRONIZE_SQL :: Use the synchronize_sql extension when running the specs
+SEQUEL_TZINFO_VERSION :: Force the given tzinfo version when running the specs (e.g. '>=2')
@@ -169,6 +169,44 @@ If you want the current savepoint and potentially enclosing savepoints to be rol
     end # ROLLBACK TO SAVEPOINT
   end # ROLLBACK
 
+=== Savepoint Hooks
+
+When using savepoints, you can use the +:savepoint+ option to +after_commit+ or +after_rollback+ to use a savepoint hook. For +after_commit+, this will only run the hook after transaction commit if all enclosing savepoints are released (not rolled back). For +after_rollback+, this will run the hook after any enclosing savepoint is rolled back (before transaction commit), or after the transaction is rolled back if all enclosing savepoints are released:
+
+  x = nil
+  DB.transaction do # BEGIN
+    DB.transaction(savepoint: true) do # SAVEPOINT
+      DB.after_commit(savepoint: true){x = 1}
+      DB.after_rollback(savepoint: true){x = 2}
+      x # nil
+    end # RELEASE SAVEPOINT
+    x # nil
+  end # COMMIT
+  x # 1
+
+  x = nil
+  DB.transaction do # BEGIN
+    DB.transaction(savepoint: true) do # SAVEPOINT
+      DB.after_commit(savepoint: true){x = 1}
+      DB.after_rollback(savepoint: true){x = 2}
+      x # nil
+      raise Sequel::Rollback
+    end # ROLLBACK TO SAVEPOINT
+    x # 2
+  end # COMMIT
+  x # 2
+
+  x = nil
+  DB.transaction do # BEGIN
+    DB.transaction(savepoint: true) do # SAVEPOINT
+      DB.after_commit(savepoint: true){x = 1}
+      DB.after_rollback(savepoint: true){x = 2}
+    end # RELEASE SAVEPOINT
+    x # nil
+    raise Sequel::Rollback
+  end
+  x # 2
+
 == Prepared Transactions / Two-Phase Commit
 
 Sequel supports database prepared transactions on PostgreSQL, MySQL, and H2. With prepared transactions, at the end of the transaction, the transaction is not immediately committed (it acts like a rollback). Later, you can call +commit_prepared_transaction+ to commit the transaction or +rollback_prepared_transaction+ to roll the transaction back. Prepared transactions are usually used with distributed databases to make sure all databases commit the same transaction or none of them do.
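A brief sketch of that workflow, based on the paragraph above (the transaction id string and the table are arbitrary examples):

  DB.transaction(prepare: 'xa-1234') do
    DB[:accounts].where(id: 1).update(active: false)
  end
  # The work is prepared but not yet committed; later, a coordinator decides:
  DB.commit_prepared_transaction('xa-1234')
  # or
  DB.rollback_prepared_transaction('xa-1234')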
@@ -47,34 +47,40 @@ module Sequel
     #AdVarWChar = 202
     #AdWChar = 130
 
-    cp = Object.new
-
-    def cp.bigint(v)
+    bigint = Object.new
+    def bigint.call(v)
       v.to_i
     end
 
-    def cp.numeric(v)
-      BigDecimal(v)
+    numeric = Object.new
+    def numeric.call(v)
+      if v.include?(',')
+        BigDecimal(v.tr(',', '.'))
+      else
+        BigDecimal(v)
+      end
     end
 
-    def cp.binary(v)
+    binary = Object.new
+    def binary.call(v)
       Sequel.blob(v.pack('c*'))
     end
 
-    def cp.date(v)
+    date = Object.new
+    def date.call(v)
       Date.new(v.year, v.month, v.day)
     end
 
     CONVERSION_PROCS = {}
     [
-      [:bigint, AdBigInt],
-      [:numeric, AdNumeric, AdVarNumeric],
-      [:date, AdDBDate],
-      [:binary, AdBinary, AdVarBinary, AdLongVarBinary]
-    ].each do |meth, *types|
-      method = cp.method(meth)
+      [bigint, AdBigInt],
+      [numeric, AdNumeric, AdVarNumeric],
+      [date, AdDBDate],
+      [binary, AdBinary, AdVarBinary, AdLongVarBinary]
+    ].each do |callable, *types|
+      callable.freeze
       types.each do |i|
-        CONVERSION_PROCS[i] = method
+        CONVERSION_PROCS[i] = callable
       end
     end
     CONVERSION_PROCS.freeze
@@ -227,7 +233,6 @@ module Sequel
         cols = []
         conversion_procs = db.conversion_procs
 
-        i = -1
         ts_cp = nil
         recordset.Fields.each do |field|
           type = field.Type
@@ -244,18 +249,21 @@ module Sequel
           else
             conversion_procs[type]
           end
-          cols << [output_identifier(field.Name), cp, i+=1]
+          cols << [output_identifier(field.Name), cp]
         end
 
         self.columns = cols.map(&:first)
         return if recordset.EOF
+        max = cols.length
 
         recordset.GetRows.transpose.each do |field_values|
           h = {}
 
-          cols.each do |name, cp, index|
-            h[name] = if (v = field_values[index]) && cp
-              cp[v]
+          i = -1
+          while (i += 1) < max
+            name, cp = cols[i]
+            h[name] = if (v = field_values[i]) && cp
+              cp.call(v)
             else
               v
             end
@@ -102,12 +102,17 @@ module Sequel
        v.getSubString(1, v.length)
      end
    end
+    x = convertors[:RubyArray] = Object.new
+    def x.call(r, i)
+      if v = r.getArray(i)
+        v.array.to_ary
+      end
+    end
 
    MAP = Hash.new(convertors[:Object])
    types = Java::JavaSQL::Types
 
    {
-      :ARRAY => :Array,
      :BOOLEAN => :Boolean,
      :CHAR => :String,
      :DOUBLE => :Double,
@@ -126,6 +131,7 @@ module Sequel
    BASIC_MAP = MAP.dup
 
    {
+      :ARRAY => :Array,
      :BINARY => :Blob,
      :BLOB => :Blob,
      :CLOB => :Clob,
@@ -54,12 +54,12 @@ module Sequel
      # MySQL 5.1.12 JDBC adapter requires generated keys
      # and previous versions don't mind.
      def execute_statement_insert(stmt, sql)
-        stmt.executeUpdate(sql, JavaSQL::Statement.RETURN_GENERATED_KEYS)
+        stmt.executeUpdate(sql, JavaSQL::Statement::RETURN_GENERATED_KEYS)
      end
 
      # Return generated keys for insert statements.
      def prepare_jdbc_statement(conn, sql, opts)
-        opts[:type] == :insert ? conn.prepareStatement(sql, JavaSQL::Statement.RETURN_GENERATED_KEYS) : super
+        opts[:type] == :insert ? conn.prepareStatement(sql, JavaSQL::Statement::RETURN_GENERATED_KEYS) : super
      end
 
      # Convert tinyint(1) type to boolean
@@ -195,17 +195,7 @@ module Sequel
 
    STRING_TYPE = Java::JavaSQL::Types::VARCHAR
    ARRAY_TYPE = Java::JavaSQL::Types::ARRAY
-    PG_SPECIFIC_TYPES = [ARRAY_TYPE, Java::JavaSQL::Types::OTHER, Java::JavaSQL::Types::STRUCT, Java::JavaSQL::Types::TIME_WITH_TIMEZONE, Java::JavaSQL::Types::TIME].freeze
-
-    # Return PostgreSQL array types as ruby Arrays instead of
-    # JDBC PostgreSQL driver-specific array type. Only used if the
-    # database does not have a conversion proc for the type.
-    ARRAY_METHOD = Object.new
-    def ARRAY_METHOD.call(r, i)
-      if v = r.getArray(i)
-        v.array.to_ary
-      end
-    end
+    PG_SPECIFIC_TYPES = [Java::JavaSQL::Types::ARRAY, Java::JavaSQL::Types::OTHER, Java::JavaSQL::Types::STRUCT, Java::JavaSQL::Types::TIME_WITH_TIMEZONE, Java::JavaSQL::Types::TIME].freeze
 
    # Return PostgreSQL hstore types as ruby Hashes instead of
    # Java HashMaps. Only used if the database does not have a
@@ -223,8 +213,6 @@ module Sequel
        oid = meta.getField(i).getOID
        if pr = db.oid_convertor_proc(oid)
          pr
-        elsif type == ARRAY_TYPE
-          ARRAY_METHOD
        elsif oid == 2950 # UUID
          map[STRING_TYPE]
        elsif meta.getPGType(i) == 'hstore'
@@ -15,6 +15,24 @@ module Sequel
    end
 
    module SQLite
+      module ForeignKeyListPragmaConvertorFix
+        # Force the use of the convertor for String, working around a bug
+        # in jdbc-sqlite3 that reports fields are of type
+        # java.sql.types.NUMERIC even though they contain non-numeric data.
+        def type_convertor(_, _, _, i)
+          i > 2 ? TypeConvertor::CONVERTORS[:String] : super
+        end
+      end
+
+      module TableInfoPragmaConvertorFix
+        # Force the use of the convertor for String, working around a bug
+        # in jdbc-sqlite3 that reports the dflt_value field is of type
+        # java.sql.types.NUMERIC even though it contains string data.
+        def type_convertor(_, _, _, i)
+          i == 5 ? TypeConvertor::CONVERTORS[:String] : super
+        end
+      end
+
      module DatabaseMethods
        include Sequel::SQLite::DatabaseMethods
 
@@ -37,6 +55,17 @@ module Sequel
        end
 
        private
+
+
+        # Add workaround for bug when running foreign_key_list pragma
+        def _foreign_key_list_ds(_)
+          super.with_extend(ForeignKeyListPragmaConvertorFix)
+        end
+
+        # Add workaround for bug when running table_info pragma
+        def _parse_pragma_ds(_, _)
+          super.with_extend(TableInfoPragmaConvertorFix)
+        end
 
        DATABASE_ERROR_REGEXPS = Sequel::SQLite::DatabaseMethods::DATABASE_ERROR_REGEXPS.merge(/Abort due to constraint violation/ => ConstraintViolation).freeze
        def database_error_regexps
@@ -36,7 +36,6 @@ module Sequel
    # options such as :local_infile.
    def connect(server)
      opts = server_opts(server)
-      opts[:host] ||= 'localhost'
      opts[:username] ||= opts.delete(:user)
      opts[:flags] ||= 0
      opts[:flags] |= ::Mysql2::Client::FOUND_ROWS if ::Mysql2::Client.const_defined?(:FOUND_ROWS)
@@ -78,8 +77,8 @@ module Sequel
    end
 
    # Return the version of the MySQL server to which we are connecting.
-    def server_version(server=nil)
-      @server_version ||= (synchronize(server){|conn| conn.server_info[:id]} || super)
+    def server_version(_server=nil)
+      @server_version ||= super()
    end
 
    private
@@ -427,20 +427,19 @@ module Sequel
      m = output_identifier_meth(opts[:dataset])
      m2 = input_identifier_meth(opts[:dataset])
      tn = m2.call(table_name.to_s)
-      table_id = get(Sequel.function(:object_id, tn))
      info_sch_sch = opts[:information_schema_schema]
      inf_sch_qual = lambda{|s| info_sch_sch ? Sequel.qualify(info_sch_sch, s) : Sequel[s]}
-      sys_qual = lambda{|s| info_sch_sch ? Sequel.qualify(info_sch_sch, Sequel.qualify(Sequel.lit(''), s)) : Sequel[s]}
+      table_id = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:objects])).where(:name => tn).select_map(:object_id).first
 
-      identity_cols = metadata_dataset.from(Sequel.lit('[sys].[columns]')).
+      identity_cols = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:columns])).
        where(:object_id=>table_id, :is_identity=>true).
        select_map(:name)
 
-      pk_index_id = metadata_dataset.from(sys_qual.call(Sequel.lit('sysindexes'))).
+      pk_index_id = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:sysindexes])).
        where(:id=>table_id, :indid=>1..254){{(status & 2048)=>2048}}.
        get(:indid)
-      pk_cols = metadata_dataset.from(sys_qual.call(Sequel.lit('sysindexkeys')).as(:sik)).
-        join(sys_qual.call(Sequel.lit('syscolumns')).as(:sc), :id=>:id, :colid=>:colid).
+      pk_cols = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:sysindexkeys]).as(:sik)).
+        join(inf_sch_qual.call(Sequel[:sys][:syscolumns]).as(:sc), :id=>:id, :colid=>:colid).
        where{{sik[:id]=>table_id, sik[:indid]=>pk_index_id}}.
        select_order_map{sc[:name]}
 
@@ -765,8 +764,9 @@ module Sequel
        output(nil, [SQL::QualifiedIdentifier.new(:inserted, first_primary_key)])._import(columns, values, opts)
      elsif @opts[:output]
        statements = multi_insert_sql(columns, values)
+        ds = naked
        @db.transaction(opts.merge(:server=>@opts[:server])) do
-          statements.map{|st| with_sql(st)}
+          statements.map{|st| ds.with_sql(st)}
        end.first.map{|v| v.length == 1 ? v.values.first : v}
      else
        super
@@ -97,13 +97,17 @@ module Sequel
      # Add an exclusion constraint when creating the table. Elements should be
      # an array of 2 element arrays, with the first element being the column or
      # expression the exclusion constraint is applied to, and the second element
-      # being the operator to use for the column/expression to check for exclusion.
-      #
-      # Example:
+      # being the operator to use for the column/expression to check for exclusion:
      #
      #   exclude([[:col1, '&&'], [:col2, '=']])
      #   # EXCLUDE USING gist (col1 WITH &&, col2 WITH =)
      #
+      # To use a custom operator class, you need to use Sequel.lit with the expression
+      # and operator class:
+      #
+      #   exclude([[Sequel.lit('col1 inet_ops'), '&&'], [:col2, '=']])
+      #   # EXCLUDE USING gist (col1 inet_ops WITH &&, col2 WITH =)
+      #
      # Options supported:
      #
      # :name :: Name the constraint with the given name (useful if you may
@@ -147,10 +151,10 @@ module Sequel
    SELECT_CUSTOM_SEQUENCE_SQL = (<<-end_sql
      SELECT name.nspname AS "schema",
        CASE
-          WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN
-            substr(split_part(def.adsrc, '''', 2),
-                   strpos(split_part(def.adsrc, '''', 2), '.')+1)
-          ELSE split_part(def.adsrc, '''', 2)
+          WHEN split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2) ~ '.' THEN
+            substr(split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2),
+                   strpos(split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2), '.')+1)
+          ELSE split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2)
        END AS "sequence"
      FROM pg_class t
      JOIN pg_namespace name ON (t.relnamespace = name.oid)
@@ -158,7 +162,7 @@ module Sequel
      JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
      JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
      WHERE cons.contype = 'p'
-        AND def.adsrc ~* 'nextval'
+        AND pg_get_expr(def.adbin, attr.attrelid) ~* 'nextval'
    end_sql
    ).strip.gsub(/\s+/, ' ').freeze
 
@@ -220,24 +224,22 @@ module Sequel
      # A hash of metadata for CHECK constraints on the table.
      # Keys are CHECK constraint name symbols. Values are hashes with the following keys:
      # :definition :: An SQL fragment for the definition of the constraint
-      # :columns :: An array of column symbols for the columns referenced in the constraint
+      # :columns :: An array of column symbols for the columns referenced in the constraint,
+      #             can be an empty array if the database cannot determine the column symbols.
      def check_constraints(table)
        m = output_identifier_meth
 
        rows = metadata_dataset.
          from{pg_constraint.as(:co)}.
-          join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).
+          left_join(Sequel[:pg_attribute].as(:att), :attrelid=>:conrelid, :attnum=>SQL::Function.new(:ANY, Sequel[:co][:conkey])).
          where(:conrelid=>regclass_oid(table), :contype=>'c').
          select{[co[:conname].as(:constraint), att[:attname].as(:column), pg_get_constraintdef(co[:oid]).as(:definition)]}
 
        hash = {}
        rows.each do |row|
          constraint = m.call(row[:constraint])
-          if entry = hash[constraint]
-            entry[:columns] << m.call(row[:column])
-          else
-            hash[constraint] = {:definition=>row[:definition], :columns=>[m.call(row[:column])]}
-          end
+          entry = hash[constraint] ||= {:definition=>row[:definition], :columns=>[]}
+          entry[:columns] << m.call(row[:column]) if row[:column]
        end
 
        hash
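The refactored check_constraints above changes only the shape of the returned metadata. A hedged example of a call and a plausible result (table, constraint, and column names invented):

  DB.create_table(:items) do
    Integer :price
    constraint(:price_positive){price > 0}
  end

  DB.check_constraints(:items)
  # => {:price_positive=>{:definition=>"CHECK ((price > 0))", :columns=>[:price]}}
  # A constraint whose columns cannot be determined is now returned with
  # :columns=>[] (previously it could be skipped entirely due to the inner join).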
@@ -838,10 +840,14 @@ module Sequel
      # default value is given.
      def column_definition_default_sql(sql, column)
        super
-        if !column[:serial] && !['serial', 'bigserial'].include?(column[:type].to_s) && !column[:default] && (identity = column[:identity])
-          sql << " GENERATED "
-          sql << (identity == :always ? "ALWAYS" : "BY DEFAULT")
-          sql << " AS IDENTITY"
+        if !column[:serial] && !['serial', 'bigserial'].include?(column[:type].to_s) && !column[:default]
+          if (identity = column[:identity])
+            sql << " GENERATED "
+            sql << (identity == :always ? "ALWAYS" : "BY DEFAULT")
+            sql << " AS IDENTITY"
+          elsif (generated = column[:generated_always_as])
+            sql << " GENERATED ALWAYS AS (#{literal(generated)}) STORED"
+          end
        end
      end
 
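The new elsif branch above wires a :generated_always_as column option into the PostgreSQL DDL for stored generated columns. A hedged sketch of how a schema definition might use it (table and column names invented; the emitted SQL is approximate):

  DB.create_table(:line_items) do
    Integer :quantity
    Integer :unit_price
    Integer :total, generated_always_as: Sequel[:quantity] * :unit_price
  end
  # ... "total" integer GENERATED ALWAYS AS (("quantity" * "unit_price")) STORED ...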
 
@@ -1922,6 +1928,18 @@ module Sequel
        opts[:with].any?{|w| w[:recursive]} ? "WITH RECURSIVE " : super
      end
 
+      # Support WITH AS [NOT] MATERIALIZED if :materialized option is used.
+      def select_with_sql_prefix(sql, w)
+        super
+
+        case w[:materialized]
+        when true
+          sql << "MATERIALIZED "
+        when false
+          sql << "NOT MATERIALIZED "
+        end
+      end
+
      # The version of the database server
      def server_version
        db.server_version(@opts[:server])
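The select_with_sql_prefix hook above reads a :materialized entry from the CTE options, which presumably corresponds to a :materialized option on Dataset#with. A hedged sketch (dataset and names invented; the SQL shown is approximate):

  expensive = DB[:items].where{price > 100}
  DB[:expensive_items].with(:expensive_items, expensive, materialized: false).all
  # WITH "expensive_items" AS NOT MATERIALIZED (SELECT * FROM "items" WHERE ("price" > 100))
  # SELECT * FROM "expensive_items"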
@@ -10,6 +10,10 @@ module Sequel
    def self.mock_adapter_setup(db)
      db.instance_exec do
        @sqlite_version = 30903
+
+        def schema_parse_table(*)
+          []
+        end
      end
    end
 
@@ -60,7 +64,7 @@ module Sequel
      def foreign_key_list(table, opts=OPTS)
        m = output_identifier_meth
        h = {}
-        metadata_dataset.with_sql("PRAGMA foreign_key_list(?)", input_identifier_meth.call(table)).each do |row|
+        _foreign_key_list_ds(table).each do |row|
          if r = h[row[:id]]
            r[:columns] << m.call(row[:from])
            r[:key] << m.call(row[:to]) if r[:key]
@@ -173,6 +177,16 @@ module Sequel
 
      private
 
+      # Dataset used for parsing foreign key lists
+      def _foreign_key_list_ds(table)
+        metadata_dataset.with_sql("PRAGMA foreign_key_list(?)", input_identifier_meth.call(table))
+      end
+
+      # Dataset used for parsing schema
+      def _parse_pragma_ds(table_name, opts)
+        metadata_dataset.with_sql("PRAGMA table_info(?)", input_identifier_meth(opts[:dataset]).call(table_name))
+      end
+
      # Run all alter_table commands in a transaction. This is technically only
      # needed for drop column.
      def apply_alter_table(table, ops)
@@ -445,7 +459,7 @@ module Sequel
      # Parse the output of the table_info pragma
      def parse_pragma(table_name, opts)
        pks = 0
-        sch = metadata_dataset.with_sql("PRAGMA table_info(?)", input_identifier_meth(opts[:dataset]).call(table_name)).map do |row|
+        sch = _parse_pragma_ds(table_name, opts).map do |row|
          row.delete(:cid)
          row[:allow_null] = row.delete(:notnull).to_i == 0
          row[:default] = row.delete(:dflt_value)
@@ -513,7 +527,7 @@ module Sequel
 
      Dataset.def_sql_method(self, :delete, [['if db.sqlite_version >= 30803', %w'with delete from where'], ["else", %w'delete from where']])
      Dataset.def_sql_method(self, :insert, [['if db.sqlite_version >= 30803', %w'with insert conflict into columns values on_conflict'], ["else", %w'insert conflict into columns values']])
-      Dataset.def_sql_method(self, :select, [['if opts[:values]', %w'with values compounds'], ['else', %w'with select distinct columns from join where group having compounds order limit lock']])
+      Dataset.def_sql_method(self, :select, [['if opts[:values]', %w'with values compounds'], ['else', %w'with select distinct columns from join where group having window compounds order limit lock']])
      Dataset.def_sql_method(self, :update, [['if db.sqlite_version >= 30803', %w'with update table set where'], ["else", %w'update table set where']])
 
      def cast_sql_append(sql, expr, type)
@@ -732,6 +746,11 @@ module Sequel
      def supports_where_true?
        false
      end
+
+      # SQLite 3.28+ supports the WINDOW clause.
+      def supports_window_clause?
+        db.sqlite_version >= 32800
+      end
 
      # SQLite 3.25+ supports window functions. However, support is only enabled
      # on SQLite 3.26.0+ because internal Sequel usage of window functions
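For reference, the features these version checks gate are reached through Sequel's normal window function API; a hedged example (table and column names invented):

  DB[:sales].select(
    :salesperson,
    :amount,
    Sequel.function(:rank).over(partition: :salesperson, order: Sequel.desc(:amount)).as(:rnk)
  ).all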
@@ -741,6 +760,11 @@ module Sequel
        db.sqlite_version >= 32600
      end
 
+      # SQLite 3.28.0+ supports all window frame options that Sequel supports
+      def supports_window_function_frame_option?(option)
+        db.sqlite_version >= 32800 ? true : super
+      end
+
      private
 
      # SQLite uses string literals instead of identifiers in AS clauses.