sequel 5.21.0 → 5.26.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG +80 -0
  3. data/README.rdoc +1 -1
  4. data/doc/dataset_filtering.rdoc +15 -0
  5. data/doc/opening_databases.rdoc +3 -0
  6. data/doc/postgresql.rdoc +2 -2
  7. data/doc/release_notes/5.22.0.txt +48 -0
  8. data/doc/release_notes/5.23.0.txt +56 -0
  9. data/doc/release_notes/5.24.0.txt +56 -0
  10. data/doc/release_notes/5.25.0.txt +32 -0
  11. data/doc/release_notes/5.26.0.txt +35 -0
  12. data/doc/testing.rdoc +1 -0
  13. data/lib/sequel/adapters/jdbc.rb +7 -1
  14. data/lib/sequel/adapters/jdbc/postgresql.rb +1 -13
  15. data/lib/sequel/adapters/jdbc/sqlite.rb +29 -0
  16. data/lib/sequel/adapters/mysql2.rb +0 -1
  17. data/lib/sequel/adapters/shared/mssql.rb +11 -9
  18. data/lib/sequel/adapters/shared/postgres.rb +42 -12
  19. data/lib/sequel/adapters/shared/sqlite.rb +16 -2
  20. data/lib/sequel/adapters/tinytds.rb +12 -0
  21. data/lib/sequel/adapters/utils/mysql_mysql2.rb +1 -1
  22. data/lib/sequel/database/logging.rb +7 -1
  23. data/lib/sequel/database/schema_generator.rb +11 -2
  24. data/lib/sequel/database/schema_methods.rb +2 -0
  25. data/lib/sequel/dataset.rb +4 -2
  26. data/lib/sequel/dataset/actions.rb +3 -2
  27. data/lib/sequel/dataset/query.rb +4 -0
  28. data/lib/sequel/dataset/sql.rb +11 -7
  29. data/lib/sequel/extensions/named_timezones.rb +51 -9
  30. data/lib/sequel/extensions/pg_array.rb +4 -0
  31. data/lib/sequel/extensions/pg_json.rb +88 -17
  32. data/lib/sequel/extensions/pg_json_ops.rb +124 -0
  33. data/lib/sequel/extensions/pg_range.rb +12 -2
  34. data/lib/sequel/extensions/pg_row.rb +3 -1
  35. data/lib/sequel/extensions/sql_comments.rb +2 -2
  36. data/lib/sequel/model/base.rb +12 -5
  37. data/lib/sequel/plugins/association_multi_add_remove.rb +83 -0
  38. data/lib/sequel/plugins/association_proxies.rb +3 -2
  39. data/lib/sequel/plugins/caching.rb +3 -0
  40. data/lib/sequel/plugins/class_table_inheritance.rb +10 -0
  41. data/lib/sequel/plugins/csv_serializer.rb +26 -9
  42. data/lib/sequel/plugins/dirty.rb +3 -9
  43. data/lib/sequel/plugins/insert_conflict.rb +72 -0
  44. data/lib/sequel/plugins/nested_attributes.rb +7 -0
  45. data/lib/sequel/plugins/pg_auto_constraint_validations.rb +89 -30
  46. data/lib/sequel/plugins/sharding.rb +11 -5
  47. data/lib/sequel/plugins/static_cache.rb +8 -3
  48. data/lib/sequel/plugins/static_cache_cache.rb +53 -0
  49. data/lib/sequel/plugins/typecast_on_load.rb +3 -2
  50. data/lib/sequel/sql.rb +18 -4
  51. data/lib/sequel/timezones.rb +50 -11
  52. data/lib/sequel/version.rb +1 -1
  53. data/spec/adapters/postgres_spec.rb +174 -0
  54. data/spec/bin_spec.rb +2 -2
  55. data/spec/core/database_spec.rb +50 -0
  56. data/spec/core/dataset_spec.rb +33 -1
  57. data/spec/core/expression_filters_spec.rb +32 -3
  58. data/spec/core/schema_spec.rb +18 -0
  59. data/spec/core/spec_helper.rb +1 -1
  60. data/spec/core_extensions_spec.rb +1 -1
  61. data/spec/extensions/association_multi_add_remove_spec.rb +1041 -0
  62. data/spec/extensions/dirty_spec.rb +33 -0
  63. data/spec/extensions/insert_conflict_spec.rb +103 -0
  64. data/spec/extensions/named_timezones_spec.rb +109 -2
  65. data/spec/extensions/nested_attributes_spec.rb +48 -0
  66. data/spec/extensions/pg_auto_constraint_validations_spec.rb +37 -0
  67. data/spec/extensions/pg_json_ops_spec.rb +67 -0
  68. data/spec/extensions/pg_json_spec.rb +12 -0
  69. data/spec/extensions/pg_range_spec.rb +90 -9
  70. data/spec/extensions/sharding_spec.rb +8 -0
  71. data/spec/extensions/spec_helper.rb +9 -2
  72. data/spec/extensions/static_cache_cache_spec.rb +35 -0
  73. data/spec/guards_helper.rb +1 -1
  74. data/spec/integration/dataset_test.rb +24 -8
  75. data/spec/integration/plugin_test.rb +27 -0
  76. data/spec/integration/schema_test.rb +16 -2
  77. data/spec/model/spec_helper.rb +1 -1
  78. metadata +32 -2

data/lib/sequel/adapters/jdbc/sqlite.rb

@@ -15,6 +15,24 @@ module Sequel
     end
 
     module SQLite
+      module ForeignKeyListPragmaConvertorFix
+        # For the use of the convertor for String, working around a bug
+        # in jdbc-sqlite3 that reports fields are of type
+        # java.sql.types.NUMERIC even though they contain non-numeric data.
+        def type_convertor(_, _, _, i)
+          i > 2 ? TypeConvertor::CONVERTORS[:String] : super
+        end
+      end
+
+      module TableInfoPragmaConvertorFix
+        # For the use of the convertor for String, working around a bug
+        # in jdbc-sqlite3 that reports dflt_value field is of type
+        # java.sql.types.NUMERIC even though they contain string data.
+        def type_convertor(_, _, _, i)
+          i == 5 ? TypeConvertor::CONVERTORS[:String] : super
+        end
+      end
+
       module DatabaseMethods
         include Sequel::SQLite::DatabaseMethods
 
@@ -37,6 +55,17 @@ module Sequel
         end
 
         private
+
+
+        # Add workaround for bug when running foreign_key_list pragma
+        def _foreign_key_list_ds(_)
+          super.with_extend(ForeignKeyListPragmaConvertorFix)
+        end
+
+        # Add workaround for bug when running table_info pragma
+        def _parse_pragma_ds(_, _)
+          super.with_extend(TableInfoPragmaConvertorFix)
+        end
 
         DATABASE_ERROR_REGEXPS = Sequel::SQLite::DatabaseMethods::DATABASE_ERROR_REGEXPS.merge(/Abort due to constraint violation/ => ConstraintViolation).freeze
         def database_error_regexps
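
For context, a rough sketch (not part of the gem) of the pattern these modules rely on: Dataset#with_extend extends just one dataset with a module, so the type_convertor override only affects the pragma queries. The module name, column index, and constant path below are assumptions for illustration.

  # Illustrative only: treat column 3 (JDBC column indexes are 1-based) as a
  # String for this single dataset. CONVERTORS path assumed from the adapter
  # code above.
  module ThirdColumnAsString
    def type_convertor(_, _, _, i)
      i == 3 ? Sequel::JDBC::TypeConvertor::CONVERTORS[:String] : super
    end
  end

  # ds = DB.fetch("PRAGMA table_info(albums)")   # jdbc/sqlite connection assumed
  # ds.with_extend(ThirdColumnAsString).all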

data/lib/sequel/adapters/mysql2.rb

@@ -36,7 +36,6 @@ module Sequel
     # options such as :local_infile.
     def connect(server)
       opts = server_opts(server)
-      opts[:host] ||= 'localhost'
       opts[:username] ||= opts.delete(:user)
       opts[:flags] ||= 0
       opts[:flags] |= ::Mysql2::Client::FOUND_ROWS if ::Mysql2::Client.const_defined?(:FOUND_ROWS)
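
With the forced :host default removed, an options hash without :host is passed to Mysql2::Client unchanged, which (presumably the intent) lets mysql2 apply its own defaults, such as connecting over a local socket. Connection details below are illustrative.

  DB = Sequel.connect(
    adapter: 'mysql2',
    socket: '/var/run/mysqld/mysqld.sock',  # no :host given; path is illustrative
    user: 'app',
    password: 'secret',
    database: 'app_production'
  )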

data/lib/sequel/adapters/shared/mssql.rb

@@ -279,7 +279,7 @@ module Sequel
           end
         end
         sqls << "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{column_definition_sql(op)}"
-        sqls << alter_table_sql(table, op.merge(:op=>:set_column_default, :default=>default)) if default
+        sqls << alter_table_sql(table, op.merge(:op=>:set_column_default, :default=>default, :skip_drop_default=>true)) if default
         sqls
       when :set_column_null
         sch = schema(table).find{|k,v| k.to_s == op[:name].to_s}.last
@@ -290,7 +290,9 @@ module Sequel
         end
         "ALTER TABLE #{quote_schema_table(table)} ALTER COLUMN #{quote_identifier(op[:name])} #{type_literal(:type=>type)} #{'NOT ' unless op[:null]}NULL"
       when :set_column_default
-        "ALTER TABLE #{quote_schema_table(table)} ADD CONSTRAINT #{quote_identifier("sequel_#{table}_#{op[:name]}_def")} DEFAULT #{literal(op[:default])} FOR #{quote_identifier(op[:name])}"
+        sqls = []
+        add_drop_default_constraint_sql(sqls, table, op[:name]) unless op[:skip_drop_default]
+        sqls << "ALTER TABLE #{quote_schema_table(table)} ADD CONSTRAINT #{quote_identifier("sequel_#{table}_#{op[:name]}_def")} DEFAULT #{literal(op[:default])} FOR #{quote_identifier(op[:name])}"
       else
         super(table, op)
       end
@@ -427,20 +429,19 @@ module Sequel
       m = output_identifier_meth(opts[:dataset])
       m2 = input_identifier_meth(opts[:dataset])
       tn = m2.call(table_name.to_s)
-      table_id = get(Sequel.function(:object_id, tn))
       info_sch_sch = opts[:information_schema_schema]
       inf_sch_qual = lambda{|s| info_sch_sch ? Sequel.qualify(info_sch_sch, s) : Sequel[s]}
-      sys_qual = lambda{|s| info_sch_sch ? Sequel.qualify(info_sch_sch, Sequel.qualify(Sequel.lit(''), s)) : Sequel[s]}
+      table_id = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:objects])).where(:name => tn).select_map(:object_id).first
 
-      identity_cols = metadata_dataset.from(Sequel.lit('[sys].[columns]')).
+      identity_cols = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:columns])).
         where(:object_id=>table_id, :is_identity=>true).
         select_map(:name)
 
-      pk_index_id = metadata_dataset.from(sys_qual.call(Sequel.lit('sysindexes'))).
+      pk_index_id = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:sysindexes])).
         where(:id=>table_id, :indid=>1..254){{(status & 2048)=>2048}}.
         get(:indid)
-      pk_cols = metadata_dataset.from(sys_qual.call(Sequel.lit('sysindexkeys')).as(:sik)).
-        join(sys_qual.call(Sequel.lit('syscolumns')).as(:sc), :id=>:id, :colid=>:colid).
+      pk_cols = metadata_dataset.from(inf_sch_qual.call(Sequel[:sys][:sysindexkeys]).as(:sik)).
+        join(inf_sch_qual.call(Sequel[:sys][:syscolumns]).as(:sc), :id=>:id, :colid=>:colid).
         where{{sik[:id]=>table_id, sik[:indid]=>pk_index_id}}.
         select_order_map{sc[:name]}
 
@@ -765,8 +766,9 @@ module Sequel
         output(nil, [SQL::QualifiedIdentifier.new(:inserted, first_primary_key)])._import(columns, values, opts)
       elsif @opts[:output]
         statements = multi_insert_sql(columns, values)
+        ds = naked
         @db.transaction(opts.merge(:server=>@opts[:server])) do
-          statements.map{|st| with_sql(st)}
+          statements.map{|st| ds.with_sql(st)}
         end.first.map{|v| v.length == 1 ? v.values.first : v}
       else
         super
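
Usage sketch for the set_column_default change: altering a default now first drops the existing default constraint (unless the internal :skip_drop_default flag is set), then adds the new one. Table, column, and constraint names below are illustrative.

  DB.alter_table(:items) do
    set_column_default :quantity, 0
  end
  # Roughly the SQL generated on Microsoft SQL Server (the existing constraint name varies):
  #   ALTER TABLE [items] DROP CONSTRAINT [existing_default_constraint]
  #   ALTER TABLE [items] ADD CONSTRAINT [sequel_items_quantity_def] DEFAULT 0 FOR [quantity]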

data/lib/sequel/adapters/shared/postgres.rb

@@ -97,13 +97,17 @@ module Sequel
       # Add an exclusion constraint when creating the table. Elements should be
       # an array of 2 element arrays, with the first element being the column or
       # expression the exclusion constraint is applied to, and the second element
-      # being the operator to use for the column/expression to check for exclusion.
-      #
-      # Example:
+      # being the operator to use for the column/expression to check for exclusion:
       #
       #   exclude([[:col1, '&&'], [:col2, '=']])
       #   # EXCLUDE USING gist (col1 WITH &&, col2 WITH =)
       #
+      # To use a custom operator class, you need to use Sequel.lit with the expression
+      # and operator class:
+      #
+      #   exclude([[Sequel.lit('col1 inet_ops'), '&&'], [:col2, '=']])
+      #   # EXCLUDE USING gist (col1 inet_ops WITH &&, col2 WITH =)
+      #
       # Options supported:
       #
       # :name :: Name the constraint with the given name (useful if you may
@@ -147,10 +151,10 @@ module Sequel
      SELECT_CUSTOM_SEQUENCE_SQL = (<<-end_sql
        SELECT name.nspname AS "schema",
          CASE
-          WHEN split_part(def.adsrc, '''', 2) ~ '.' THEN
-            substr(split_part(def.adsrc, '''', 2),
-                   strpos(split_part(def.adsrc, '''', 2), '.')+1)
-          ELSE split_part(def.adsrc, '''', 2)
+          WHEN split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2) ~ '.' THEN
+            substr(split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2),
+                   strpos(split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2), '.')+1)
+          ELSE split_part(pg_get_expr(def.adbin, attr.attrelid), '''', 2)
          END AS "sequence"
        FROM pg_class t
        JOIN pg_namespace name ON (t.relnamespace = name.oid)
@@ -158,7 +162,7 @@ module Sequel
        JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
        JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
        WHERE cons.contype = 'p'
-        AND def.adsrc ~* 'nextval'
+        AND pg_get_expr(def.adbin, attr.attrelid) ~* 'nextval'
      end_sql
      ).strip.gsub(/\s+/, ' ').freeze
 
@@ -836,10 +840,14 @@ module Sequel
      # default value is given.
      def column_definition_default_sql(sql, column)
        super
-        if !column[:serial] && !['serial', 'bigserial'].include?(column[:type].to_s) && !column[:default] && (identity = column[:identity])
-          sql << " GENERATED "
-          sql << (identity == :always ? "ALWAYS" : "BY DEFAULT")
-          sql << " AS IDENTITY"
+        if !column[:serial] && !['serial', 'bigserial'].include?(column[:type].to_s) && !column[:default]
+          if (identity = column[:identity])
+            sql << " GENERATED "
+            sql << (identity == :always ? "ALWAYS" : "BY DEFAULT")
+            sql << " AS IDENTITY"
+          elsif (generated = column[:generated_always_as])
+            sql << " GENERATED ALWAYS AS (#{literal(generated)}) STORED"
+          end
        end
      end
 
@@ -1798,6 +1806,16 @@ module Sequel
        end
      end
 
+      # Include aliases when inserting into a single table on PostgreSQL 9.5+.
+      def insert_into_sql(sql)
+        sql << " INTO "
+        if (f = @opts[:from]) && f.length == 1
+          identifier_append(sql, server_version >= 90500 ? f.first : unaliased_identifier(f.first))
+        else
+          source_list_append(sql, f)
+        end
+      end
+
      # Return the primary key to use for RETURNING in an INSERT statement
      def insert_pk
        if (f = opts[:from]) && !f.empty?
@@ -1920,6 +1938,18 @@ module Sequel
        opts[:with].any?{|w| w[:recursive]} ? "WITH RECURSIVE " : super
      end
 
+      # Support WITH AS [NOT] MATERIALIZED if :materialized option is used.
+      def select_with_sql_prefix(sql, w)
+        super
+
+        case w[:materialized]
+        when true
+          sql << "MATERIALIZED "
+        when false
+          sql << "NOT MATERIALIZED "
+        end
+      end
+
      # The version of the database server
      def server_version
        db.server_version(@opts[:server])
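
Usage sketch for two of the features visible above, generated columns and exclusion constraints with a custom operator class. Both require PostgreSQL (12+ for generated columns), and every table, column, and operator class name here is illustrative.

  DB.create_table(:line_items) do
    Integer :quantity
    Integer :unit_price
    # GENERATED ALWAYS AS ((quantity * unit_price)) STORED
    Integer :total, generated_always_as: Sequel[:quantity] * Sequel[:unit_price]
  end

  DB.create_table(:reservations) do
    column :room, :inet
    column :during, :tsrange
    # EXCLUDE USING gist (room inet_ops WITH &&, during WITH &&)
    exclude [[Sequel.lit('room inet_ops'), '&&'], [:during, '&&']]
  end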

data/lib/sequel/adapters/shared/sqlite.rb

@@ -10,6 +10,10 @@ module Sequel
      def self.mock_adapter_setup(db)
        db.instance_exec do
          @sqlite_version = 30903
+
+          def schema_parse_table(*)
+            []
+          end
        end
      end
 
@@ -60,7 +64,7 @@ module Sequel
      def foreign_key_list(table, opts=OPTS)
        m = output_identifier_meth
        h = {}
-        metadata_dataset.with_sql("PRAGMA foreign_key_list(?)", input_identifier_meth.call(table)).each do |row|
+        _foreign_key_list_ds(table).each do |row|
          if r = h[row[:id]]
            r[:columns] << m.call(row[:from])
            r[:key] << m.call(row[:to]) if r[:key]
@@ -173,6 +177,16 @@ module Sequel
 
      private
 
+      # Dataset used for parsing foreign key lists
+      def _foreign_key_list_ds(table)
+        metadata_dataset.with_sql("PRAGMA foreign_key_list(?)", input_identifier_meth.call(table))
+      end
+
+      # Dataset used for parsing schema
+      def _parse_pragma_ds(table_name, opts)
+        metadata_dataset.with_sql("PRAGMA table_info(?)", input_identifier_meth(opts[:dataset]).call(table_name))
+      end
+
      # Run all alter_table commands in a transaction. This is technically only
      # needed for drop column.
      def apply_alter_table(table, ops)
@@ -445,7 +459,7 @@ module Sequel
      # Parse the output of the table_info pragma
      def parse_pragma(table_name, opts)
        pks = 0
-        sch = metadata_dataset.with_sql("PRAGMA table_info(?)", input_identifier_meth(opts[:dataset]).call(table_name)).map do |row|
+        sch = _parse_pragma_ds(table_name, opts).map do |row|
          row.delete(:cid)
          row[:allow_null] = row.delete(:notnull).to_i == 0
          row[:default] = row.delete(:dflt_value)
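
The public SQLite introspection behavior is unchanged; the pragmas are just reached through the new private hooks so subadapters (like the jdbc/sqlite one above) can adjust the datasets. A usage sketch of the callers, with an illustrative table name and roughly-shaped results:

  DB.foreign_key_list(:albums)
  # => [{:columns=>[:artist_id], :table=>:artists, :key=>[:id], ...}]

  DB.schema(:albums).map { |column, info| [column, info[:db_type]] }
  # => [[:id, "integer"], [:artist_id, "integer"], ...]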

data/lib/sequel/adapters/tinytds.rb

@@ -16,6 +16,18 @@ module Sequel
        c = TinyTds::Client.new(opts)
        c.query_options.merge!(:cache_rows=>false)
 
+        if opts[:ansi]
+          sql = %w(
+            ANSI_NULLS
+            ANSI_PADDING
+            ANSI_WARNINGS
+            ANSI_NULL_DFLT_ON
+            QUOTED_IDENTIFIER
+            CONCAT_NULL_YIELDS_NULL
+          ).map{|v| "SET #{v} ON"}.join(";")
+          log_connection_yield(sql, c){c.execute(sql)}
+        end
+
        if (ts = opts[:textsize])
          sql = "SET TEXTSIZE #{typecast_value_integer(ts)}"
          log_connection_yield(sql, c){c.execute(sql)}
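
Hedged usage sketch: the new :ansi option turns on the SET statements above when each connection is established. Connection details are illustrative.

  DB = Sequel.connect(
    adapter: 'tinytds',
    host: 'mssql.example.com',
    user: 'app',
    password: 'secret',
    database: 'app',
    ansi: true   # emits SET ANSI_NULLS ON; SET ANSI_PADDING ON; ... on connect
  )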

data/lib/sequel/adapters/utils/mysql_mysql2.rb

@@ -55,7 +55,7 @@ module Sequel
          NotNullConstraintViolation
        when 1062
          UniqueConstraintViolation
-        when 1451, 1452
+        when 1451, 1452, 1216, 1217
          ForeignKeyConstraintViolation
        when 4025
          CheckConstraintViolation
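
With MySQL error codes 1216 and 1217 now mapped as well, code that already rescues the corresponding Sequel exception class covers them without change; the table name below is illustrative.

  begin
    DB[:artists].where(id: 1).delete
  rescue Sequel::ForeignKeyConstraintViolation => e
    # raised for MySQL errors 1451, 1452, 1216, and 1217
    puts "row still referenced: #{e.message}"
  end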

data/lib/sequel/database/logging.rb

@@ -35,7 +35,7 @@ module Sequel
    # Yield to the block, logging any errors at error level to all loggers,
    # and all other queries with the duration at warn or info level.
    def log_connection_yield(sql, conn, args=nil)
-      return yield if @loggers.empty?
+      return yield if skip_logging?
      sql = "#{connection_info(conn) if conn && log_connection_info}#{sql}#{"; #{args.inspect}" if args}"
      timer = Sequel.start_timer
 
@@ -58,6 +58,12 @@ module Sequel
 
    private
 
+    # Determine if logging should be skipped. Defaults to true if no loggers
+    # have been specified.
+    def skip_logging?
+      @loggers.empty?
+    end
+
    # String including information about the connection, for use when logging
    # connection info.
    def connection_info(conn)
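
A hedged sketch of what the new skip_logging? hook allows: an application or framework can override it on a Database instance to silence SQL logging conditionally, on top of the default no-loggers check. The thread-local flag is illustrative, not part of Sequel.

  def DB.skip_logging?
    # Keep the default behavior (skip when no loggers are set), and also skip
    # whenever this illustrative thread-local flag is set.
    super || Thread.current[:quiet_sql]
  end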

data/lib/sequel/database/schema_generator.rb

@@ -110,6 +110,9 @@ module Sequel
      #                yet exist on referenced table (but will exist before the transaction commits).
      #                Basically it adds DEFERRABLE INITIALLY DEFERRED on key creation.
      #                If you use :immediate as the value, uses DEFERRABLE INITIALLY IMMEDIATE.
+      # :generated_always_as :: Specify a GENERATED ALWAYS AS column expression,
+      #                         if generated columns are supported (PostgreSQL 12+, MariaDB 5.2.0+,
+      #                         and MySQL 5.7.6+).
      # :index :: Create an index on this column. If given a hash, use the hash as the
      #           options for the index.
      # :key :: For foreign key columns, the column in the associated table
@@ -126,15 +129,21 @@ module Sequel
      #                 be used if you have a single, nonautoincrementing primary key column
      #                 (use the primary_key method in that case).
      # :primary_key_constraint_name :: The name to give the primary key constraint
+      # :primary_key_deferrable :: Similar to :deferrable, but for the primary key constraint
+      #                            if :primary_key is used.
      # :type :: Overrides the type given as the argument. Generally not used by column
      #          itself, but can be passed as an option to other methods that call column.
      # :unique :: Mark the column as unique, generally has the same effect as
      #            creating a unique index on the column.
      # :unique_constraint_name :: The name to give the unique key constraint
+      # :unique_deferrable :: Similar to :deferrable, but for the unique constraint if :unique
+      #                       is used.
+      #
+      # PostgreSQL specific options:
+      #
+      # :identity :: Create an identity column.
      #
      # MySQL specific options:
-      # :generated_always_as :: Specify a GENERATED ALWAYS AS column expression,
-      #                         if generated columns are supported.
      # :generated_type :: Set the type of column when using :generated_always_as,
      #                    should be :virtual or :stored to force a type.
      def column(name, type, opts = OPTS)
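
Usage sketch combining several of the column options documented above; deferrable, identity, and generated columns need database support (for example PostgreSQL), and every name here is illustrative.

  DB.create_table(:accounts) do
    Integer :id, primary_key: true, primary_key_deferrable: true
    String  :email, unique: true, unique_deferrable: :immediate
    Integer :logins
    Integer :logins_doubled, generated_always_as: Sequel[:logins] * 2
  end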

data/lib/sequel/database/schema_methods.rb

@@ -586,6 +586,7 @@ module Sequel
          sql << " CONSTRAINT #{quote_identifier(name)}"
        end
        sql << ' PRIMARY KEY'
+        constraint_deferrable_sql_append(sql, column[:primary_key_deferrable])
      end
    end
 
@@ -606,6 +607,7 @@ module Sequel
          sql << " CONSTRAINT #{quote_identifier(name)}"
        end
        sql << ' UNIQUE'
+        constraint_deferrable_sql_append(sql, column[:unique_deferrable])
      end
    end
 

data/lib/sequel/dataset.rb

@@ -20,8 +20,10 @@ module Sequel
  #   old_posts = posts.where{stamp < Date.today - 7}
  #   davids_old_posts = davids_posts.where{stamp < Date.today - 7}
  #
- # Datasets are Enumerable objects, so they can be manipulated using any
- # of the Enumerable methods, such as map, inject, etc.
+ # Datasets are Enumerable objects, so they can be manipulated using many
+ # of the Enumerable methods, such as +map+ and +inject+. Note that there are some methods
+ # that Dataset defines that override methods defined in Enumerable and result in different
+ # behavior, such as +select+ and +group_by+.
  #
  # For more information, see the {"Dataset Basics" guide}[rdoc-ref:doc/dataset_basics.rdoc].
  class Dataset
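
A quick illustration of the overriding behavior the updated comment points out (Dataset#select builds SQL rather than filtering in Ruby); table and column names are illustrative.

  DB[:posts].select(:title)                      # SELECT title FROM posts (still a Dataset)
  DB[:posts].to_a.select { |row| row[:draft] }   # Enumerable#select over already-loaded rows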

data/lib/sequel/dataset/actions.rb

@@ -333,6 +333,7 @@ module Sequel
    #                  after every 50 records.
    # :return :: When this is set to :primary_key, returns an array of
    #             autoincremented primary key values for the rows inserted.
+   #             This does not have an effect if +values+ is a Dataset.
    # :server :: Set the server/shard to use for the transaction and insert
    #            queries.
    # :slice :: Same as :commit_every, :commit_every takes precedence.
@@ -1069,7 +1070,7 @@ module Sequel
 
    # Set the server to use to :default unless it is already set in the passed opts
    def default_server_opts(opts)
-      if @db.sharded?
+      if @db.sharded? && !opts.has_key?(:server)
        opts = Hash[opts]
        opts[:server] = @opts[:server] || :default
      end
@@ -1080,7 +1081,7 @@ module Sequel
    # :read_only server unless a specific server is set.
    def execute(sql, opts=OPTS, &block)
      db = @db
-      if db.sharded?
+      if db.sharded? && !opts.has_key?(:server)
        opts = Hash[opts]
        opts[:server] = @opts[:server] || (@opts[:lock] ? :default : :read_only)
        opts
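
Usage sketch for the :return documentation added above, including the case it excludes; dataset and table names are illustrative.

  DB[:albums].import([:name], [['A'], ['B']], return: :primary_key)
  # => [1, 2]   # autoincremented primary key values

  DB[:albums].import([:name], DB[:staging_albums].select(:name), return: :primary_key)
  # :return has no effect here, because the values are given as a Dataset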

data/lib/sequel/dataset/query.rb

@@ -1062,6 +1062,10 @@ module Sequel
    # :args :: Specify the arguments/columns for the CTE, should be an array of symbols.
    # :recursive :: Specify that this is a recursive CTE
    #
+   # PostgreSQL Specific Options:
+   # :materialized :: Set to false to force inlining of the CTE, or true to force not inlining
+   #                  the CTE (PostgreSQL 12+).
+   #
    #   DB[:items].with(:items, DB[:syx].where(Sequel[:name].like('A%')))
    #   # WITH items AS (SELECT * FROM syx WHERE (name LIKE 'A%' ESCAPE '\')) SELECT * FROM items
    def with(name, dataset, opts=OPTS)
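
Usage sketch for the :materialized option; the MATERIALIZED / NOT MATERIALIZED keywords require PostgreSQL 12+, and the generic SQL code simply ignores the option elsewhere.

  DB[:items].with(:items, DB[:syx].where(Sequel[:name].like('A%')), materialized: false)
  # WITH items AS NOT MATERIALIZED (SELECT * FROM syx WHERE (name LIKE 'A%' ESCAPE '\')) SELECT * FROM items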

data/lib/sequel/dataset/sql.rb

@@ -1510,13 +1510,7 @@ module Sequel
      comma = ', '
      ws.each do |w|
        sql << comma if c
-        quote_identifier_append(sql, w[:name])
-        if args = w[:args]
-          sql << '('
-          identifier_list_append(sql, args)
-          sql << ')'
-        end
-        sql << ' AS '
+        select_with_sql_prefix(sql, w)
        literal_dataset_append(sql, w[:dataset])
        c ||= true
      end
@@ -1530,6 +1524,16 @@ module Sequel
      "WITH "
    end
 
+    def select_with_sql_prefix(sql, w)
+      quote_identifier_append(sql, w[:name])
+      if args = w[:args]
+        sql << '('
+        identifier_list_append(sql, args)
+        sql << ')'
+      end
+      sql << ' AS '
+    end
+
    # Whether the symbol cache should be skipped when literalizing the dataset
    def skip_symbol_cache?
      @opts[:skip_symbol_cache]

data/lib/sequel/extensions/named_timezones.rb

@@ -2,18 +2,21 @@
 #
 # Allows the use of named timezones via TZInfo (requires tzinfo).
 # Forces the use of DateTime as Sequel's datetime_class, since
-# ruby's Time class doesn't support timezones other than local
-# and UTC.
+# historically, Ruby's Time class doesn't support timezones other
+# than local and UTC. To continue using Ruby's Time class when using
+# the named_timezones extension:
+#
+#   # Load the extension
+#   Sequel.extension :named_timezones
+#
+#   # Set Sequel.datetime_class back to Time
+#   Sequel.datetime_class = Time
 #
 # This allows you to either pass strings or TZInfo::Timezone
 # instance to Sequel.database_timezone=, application_timezone=, and
 # typecast_timezone=. If a string is passed, it is converted to a
 # TZInfo::Timezone using TZInfo::Timezone.get.
 #
-# To load the extension:
-#
-#   Sequel.extension :named_timezones
-#
 # Let's say you have the database server in New York and the
 # application server in Los Angeles. For historical reasons, data
 # is stored in local New York time, but the application server only
@@ -37,7 +40,8 @@
 # Note that typecasting from the database timezone to the application
 # timezone when fetching rows is dependent on the database adapter,
 # and only works on adapters where Sequel itself does the conversion.
-# It should work on mysql, postgres, sqlite, ibmdb, and jdbc.
+# It should work with the mysql, postgres, sqlite, ibmdb, and jdbc
+# adapters.
 #
 # Related module: Sequel::NamedTimezones
 
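
Configuration sketch for the scenario the extension documentation describes (database values stored in New York time, application working in Los Angeles time):

  Sequel.extension :named_timezones
  Sequel.database_timezone = 'America/New_York'
  Sequel.application_timezone = 'America/Los_Angeles'
  # Optional, per the note above: keep using Time instead of DateTime
  Sequel.datetime_class = Time
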
@@ -63,10 +67,48 @@ module Sequel
 
    private
 
-    # Handle both TZInfo 1 and TZInfo 2
-    if defined?(TZInfo::VERSION) && TZInfo::VERSION > '2'
+    if RUBY_VERSION >= '2.6'
+      # Convert the given input Time (which must be in UTC) to the given input timezone,
+      # which should be a TZInfo::Timezone instance.
+      def convert_input_time_other(v, input_timezone)
+        Time.new(v.year, v.mon, v.day, v.hour, v.min, (v.sec + Rational(v.nsec, 1000000000)), input_timezone)
+      rescue TZInfo::AmbiguousTime
+        raise unless disamb = tzinfo_disambiguator_for(v)
+        period = input_timezone.period_for_local(v, &disamb)
+        offset = period.utc_total_offset
+        Time.at(v.to_i - offset, :in => input_timezone)
+      end
+
+      # Convert the given input Time to the given output timezone,
+      # which should be a TZInfo::Timezone instance.
+      def convert_output_time_other(v, output_timezone)
+        Time.at(v.to_i, :in => output_timezone)
+      end
+    else
      # :nodoc:
      # :nocov:
+      def convert_input_time_other(v, input_timezone)
+        local_offset = input_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset
+        Time.new(1970, 1, 1, 0, 0, 0, local_offset) + v.to_i
+      end
+
+      if defined?(TZInfo::VERSION) && TZInfo::VERSION > '2'
+        def convert_output_time_other(v, output_timezone)
+          v = output_timezone.utc_to_local(v.getutc)
+          local_offset = output_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset
+          Time.new(1970, 1, 1, 0, 0, 0, local_offset) + v.to_i + local_offset
+        end
+      else
+        def convert_output_time_other(v, output_timezone)
+          v = output_timezone.utc_to_local(v.getutc)
+          local_offset = output_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset
+          Time.new(1970, 1, 1, 0, 0, 0, local_offset) + v.to_i
+        end
+      end
+    end
+
+    # Handle both TZInfo 1 and TZInfo 2
+    if defined?(TZInfo::VERSION) && TZInfo::VERSION > '2'
      def convert_input_datetime_other(v, input_timezone)
        local_offset = Rational(input_timezone.period_for_local(v, &tzinfo_disambiguator_for(v)).utc_total_offset, 86400)
        (v - local_offset).new_offset(local_offset)