sequel 4.33.0 → 4.34.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG +22 -0
- data/doc/release_notes/4.34.0.txt +86 -0
- data/doc/testing.rdoc +1 -0
- data/doc/validations.rdoc +12 -1
- data/lib/sequel/adapters/ado.rb +1 -1
- data/lib/sequel/adapters/amalgalite.rb +1 -1
- data/lib/sequel/adapters/cubrid.rb +1 -1
- data/lib/sequel/adapters/do.rb +1 -1
- data/lib/sequel/adapters/ibmdb.rb +1 -1
- data/lib/sequel/adapters/jdbc.rb +1 -1
- data/lib/sequel/adapters/mock.rb +1 -1
- data/lib/sequel/adapters/mysql.rb +1 -1
- data/lib/sequel/adapters/mysql2.rb +1 -1
- data/lib/sequel/adapters/odbc.rb +1 -1
- data/lib/sequel/adapters/oracle.rb +1 -1
- data/lib/sequel/adapters/postgres.rb +1 -1
- data/lib/sequel/adapters/shared/mssql.rb +1 -1
- data/lib/sequel/adapters/sqlanywhere.rb +1 -1
- data/lib/sequel/adapters/sqlite.rb +1 -1
- data/lib/sequel/adapters/swift.rb +1 -1
- data/lib/sequel/adapters/tinytds.rb +2 -2
- data/lib/sequel/connection_pool.rb +2 -0
- data/lib/sequel/connection_pool/sharded_single.rb +1 -1
- data/lib/sequel/connection_pool/sharded_threaded.rb +17 -4
- data/lib/sequel/connection_pool/single.rb +1 -1
- data/lib/sequel/connection_pool/threaded.rb +17 -4
- data/lib/sequel/database/misc.rb +5 -1
- data/lib/sequel/dataset.rb +4 -0
- data/lib/sequel/dataset/actions.rb +28 -15
- data/lib/sequel/extensions/columns_introspection.rb +1 -1
- data/lib/sequel/extensions/duplicate_columns_handler.rb +87 -0
- data/lib/sequel/extensions/migration.rb +9 -7
- data/lib/sequel/extensions/pg_range.rb +73 -14
- data/lib/sequel/model/base.rb +2 -2
- data/lib/sequel/plugins/dataset_associations.rb +21 -1
- data/lib/sequel/plugins/prepared_statements_safe.rb +2 -1
- data/lib/sequel/plugins/update_or_create.rb +1 -1
- data/lib/sequel/plugins/validation_helpers.rb +7 -0
- data/lib/sequel/version.rb +1 -1
- data/spec/adapters/postgres_spec.rb +14 -0
- data/spec/adapters/spec_helper.rb +6 -0
- data/spec/core/connection_pool_spec.rb +30 -3
- data/spec/core/database_spec.rb +2 -0
- data/spec/core/dataset_spec.rb +8 -0
- data/spec/extensions/dataset_associations_spec.rb +32 -0
- data/spec/extensions/duplicate_columns_handler_spec.rb +110 -0
- data/spec/extensions/pg_range_spec.rb +40 -0
- data/spec/extensions/prepared_statements_safe_spec.rb +1 -1
- data/spec/extensions/validation_helpers_spec.rb +11 -0
- data/spec/integration/associations_test.rb +22 -8
- data/spec/integration/dataset_test.rb +10 -0
- data/spec/integration/eager_loader_test.rb +1 -1
- data/spec/integration/plugin_test.rb +3 -3
- data/spec/integration/spec_helper.rb +4 -0
- metadata +6 -2
data/lib/sequel/database/misc.rb
CHANGED
@@ -108,6 +108,7 @@ module Sequel
|
|
108
108
|
# :identifier_output_method :: A string method symbol to call on identifiers coming from the database.
|
109
109
|
# :logger :: A specific logger to use.
|
110
110
|
# :loggers :: An array of loggers to use.
|
111
|
+
# :name :: A name to use for the Database object.
|
111
112
|
# :preconnect :: Whether to automatically connect to the maximum number of servers.
|
112
113
|
# :quote_identifiers :: Whether to quote identifiers.
|
113
114
|
# :servers :: A hash specifying server/shard specific options, keyed by shard symbol.
|
@@ -149,7 +150,10 @@ module Sequel
|
|
149
150
|
Sequel.synchronize{::Sequel::DATABASES.push(self)}
|
150
151
|
end
|
151
152
|
Sequel::Database.run_after_initialize(self)
|
152
|
-
|
153
|
+
if typecast_value_boolean(@opts[:preconnect]) && @pool.respond_to?(:preconnect, true)
|
154
|
+
concurrent = typecast_value_string(@opts[:preconnect]) == "concurrently"
|
155
|
+
@pool.send(:preconnect, concurrent)
|
156
|
+
end
|
153
157
|
end
|
154
158
|
|
155
159
|
# If a transaction is not currently in process, yield to the block immediately.
|
data/lib/sequel/dataset.rb
CHANGED
@@ -36,6 +36,10 @@ module Sequel
|
|
36
36
|
include SQL::NumericMethods
|
37
37
|
include SQL::OrderMethods
|
38
38
|
include SQL::StringMethods
|
39
|
+
|
40
|
+
private
|
41
|
+
|
42
|
+
attr_writer :columns
|
39
43
|
end
|
40
44
|
|
41
45
|
require(%w"query actions features graph prepared_statements misc mutation sql placeholder_literalizer", 'dataset')
|
@@ -81,7 +81,7 @@ module Sequel
|
|
81
81
|
# DB[:table].columns!
|
82
82
|
# # => [:id, :name]
|
83
83
|
def columns!
|
84
|
-
|
84
|
+
self.columns = nil
|
85
85
|
columns
|
86
86
|
end
|
87
87
|
|
@@ -558,7 +558,9 @@ module Sequel
|
|
558
558
|
end
|
559
559
|
|
560
560
|
# Returns a hash with key_column values as keys and value_column values as
|
561
|
-
# values. Similar to to_hash, but only selects the columns given.
|
561
|
+
# values. Similar to to_hash, but only selects the columns given. Like
|
562
|
+
# to_hash, it accepts an optional :hash parameter, into which entries will
|
563
|
+
# be merged.
|
562
564
|
#
|
563
565
|
# DB[:table].select_hash(:id, :name) # SELECT id, name FROM table
|
564
566
|
# # => {1=>'a', 2=>'b', ...}
|
@@ -572,12 +574,13 @@ module Sequel
|
|
572
574
|
# When using this method, you must be sure that each expression has an alias
|
573
575
|
# that Sequel can determine. Usually you can do this by calling the #as method
|
574
576
|
# on the expression and providing an alias.
|
575
|
-
def select_hash(key_column, value_column)
|
576
|
-
_select_hash(:to_hash, key_column, value_column)
|
577
|
+
def select_hash(key_column, value_column, opts = OPTS)
|
578
|
+
_select_hash(:to_hash, key_column, value_column, opts)
|
577
579
|
end
|
578
580
|
|
579
581
|
# Returns a hash with key_column values as keys and an array of value_column values.
|
580
|
-
# Similar to to_hash_groups, but only selects the columns given.
|
582
|
+
# Similar to to_hash_groups, but only selects the columns given. Like to_hash_groups,
|
583
|
+
# it accepts an optional :hash parameter, into which entries will be merged.
|
581
584
|
#
|
582
585
|
# DB[:table].select_hash_groups(:name, :id) # SELECT id, name FROM table
|
583
586
|
# # => {'a'=>[1, 4, ...], 'b'=>[2, ...], ...}
|
@@ -591,8 +594,8 @@ module Sequel
|
|
591
594
|
# When using this method, you must be sure that each expression has an alias
|
592
595
|
# that Sequel can determine. Usually you can do this by calling the #as method
|
593
596
|
# on the expression and providing an alias.
|
594
|
-
def select_hash_groups(key_column, value_column)
|
595
|
-
_select_hash(:to_hash_groups, key_column, value_column)
|
597
|
+
def select_hash_groups(key_column, value_column, opts = OPTS)
|
598
|
+
_select_hash(:to_hash_groups, key_column, value_column, opts)
|
596
599
|
end
|
597
600
|
|
598
601
|
# Selects the column given (either as an argument or as a block), and
|
@@ -715,10 +718,15 @@ module Sequel
|
|
715
718
|
#
|
716
719
|
# DB[:table].to_hash([:id, :name]) # SELECT * FROM table
|
717
720
|
# # {[1, 'Jim']=>{:id=>1, :name=>'Jim'}, [2, 'Bob']=>{:id=>2, :name=>'Bob'}, ...}
|
718
|
-
|
719
|
-
|
721
|
+
#
|
722
|
+
# This method accepts an optional :hash parameter (which can be a hash with
|
723
|
+
# a default value, a hash with a default proc, or any object that supports
|
724
|
+
# #[] and #[]=) into which entries will be merged. The default behavior is
|
725
|
+
# to start with a new, empty hash.
|
726
|
+
def to_hash(key_column, value_column = nil, opts = OPTS)
|
727
|
+
h = opts[:hash] || {}
|
720
728
|
if value_column
|
721
|
-
return naked.to_hash(key_column, value_column) if row_proc
|
729
|
+
return naked.to_hash(key_column, value_column, opts) if row_proc
|
722
730
|
if value_column.is_a?(Array)
|
723
731
|
if key_column.is_a?(Array)
|
724
732
|
each{|r| h[r.values_at(*key_column)] = r.values_at(*value_column)}
|
@@ -758,10 +766,15 @@ module Sequel
|
|
758
766
|
#
|
759
767
|
# DB[:table].to_hash_groups([:first, :middle]) # SELECT * FROM table
|
760
768
|
# # {['Jim', 'Bob']=>[{:id=>1, :first=>'Jim', :middle=>'Bob', :last=>'Smith'}, ...], ...}
|
761
|
-
|
762
|
-
|
769
|
+
#
|
770
|
+
# This method accepts an optional :hash parameter (which can be a hash with
|
771
|
+
# a default value, a hash with a default proc, or any object that supports
|
772
|
+
# #[] and #[]=) into which entries will be merged. The default behavior is
|
773
|
+
# to start with a new, empty hash.
|
774
|
+
def to_hash_groups(key_column, value_column = nil, opts = OPTS)
|
775
|
+
h = opts[:hash] || {}
|
763
776
|
if value_column
|
764
|
-
return naked.to_hash_groups(key_column, value_column) if row_proc
|
777
|
+
return naked.to_hash_groups(key_column, value_column, opts) if row_proc
|
765
778
|
if value_column.is_a?(Array)
|
766
779
|
if key_column.is_a?(Array)
|
767
780
|
each{|r| (h[r.values_at(*key_column)] ||= []) << r.values_at(*value_column)}
|
@@ -896,9 +909,9 @@ module Sequel
|
|
896
909
|
end
|
897
910
|
|
898
911
|
# Internals of +select_hash+ and +select_hash_groups+
|
899
|
-
def _select_hash(meth, key_column, value_column)
|
912
|
+
def _select_hash(meth, key_column, value_column, opts=OPTS)
|
900
913
|
select(*(key_column.is_a?(Array) ? key_column : [key_column]) + (value_column.is_a?(Array) ? value_column : [value_column])).
|
901
|
-
send(meth, hash_key_symbols(key_column), hash_key_symbols(value_column))
|
914
|
+
send(meth, hash_key_symbols(key_column), hash_key_symbols(value_column), opts)
|
902
915
|
end
|
903
916
|
|
904
917
|
# Internals of +select_map+ and +select_order_map+
|
@@ -0,0 +1,87 @@
|
|
1
|
+
# frozen-string-literal: true
|
2
|
+
#
|
3
|
+
# The duplicate_columns_handler extension allows you to customize handling of
|
4
|
+
# duplicate column names in your queries on a per-database or per-dataset level.
|
5
|
+
#
|
6
|
+
# For example, you may want to raise an exception if you join 2 tables together
|
7
|
+
# which contain a column that will override another column.
|
8
|
+
#
|
9
|
+
# To use the extension, you need to load the extension into the database:
|
10
|
+
#
|
11
|
+
# DB.extension :duplicate_columns_handler
|
12
|
+
#
|
13
|
+
# A database option is introduced: :on_duplicate_columns. It accepts a Symbol
|
14
|
+
# or any object that responds to :call.
|
15
|
+
#
|
16
|
+
# :on_duplicate_columns => :raise
|
17
|
+
# :on_duplicate_columns => :warn
|
18
|
+
# :on_duplicate_columns => :ignore
|
19
|
+
# :on_duplicate_columns => proc { |columns| arbitrary_condition? ? :raise : :warn }
|
20
|
+
#
|
21
|
+
# You may also configure duplicate columns handling for a specific dataset:
|
22
|
+
#
|
23
|
+
# ds.on_duplicate_columns(:warn)
|
24
|
+
# ds.on_duplicate_columns(:raise)
|
25
|
+
# ds.on_duplicate_columns(:ignore)
|
26
|
+
# ds.on_duplicate_columns { |columns| arbitrary_condition? ? :raise : :warn }
|
27
|
+
# ds.on_duplicate_columns(proc { |columns| arbitrary_condition? ? :raise : :warn })
|
28
|
+
#
|
29
|
+
# If :raise is specified, a Sequel::DuplicateColumnError is raised.
|
30
|
+
# If :warn is specified, you will receive a warning via `warn`.
|
31
|
+
# If a callable is specified, it will be called.
|
32
|
+
# If no on_duplicate_columns is specified, the default is :warn.
|
33
|
+
#
|
34
|
+
# Related module: Sequel::DuplicateColumnsHandler
|
35
|
+
|
36
|
+
module Sequel
|
37
|
+
module DuplicateColumnsHandler
|
38
|
+
# Customize handling of duplicate columns for this dataset.
|
39
|
+
def on_duplicate_columns(handler = (raise Error, "Must provide either an argument or a block to on_duplicate_columns" unless block_given?; nil), &block)
|
40
|
+
raise Error, "Cannot provide both an argument and a block to on_duplicate_columns" if handler && block
|
41
|
+
clone(:on_duplicate_columns=>handler||block)
|
42
|
+
end
|
43
|
+
|
44
|
+
# Override the attr_writer to check for duplicate columns, and call
|
45
|
+
# handle_duplicate_columns if necessary.
|
46
|
+
def columns=(cols)
|
47
|
+
if cols && cols.uniq.size != cols.size
|
48
|
+
handle_duplicate_columns(cols)
|
49
|
+
end
|
50
|
+
@columns = cols
|
51
|
+
end
|
52
|
+
|
53
|
+
private
|
54
|
+
|
55
|
+
# Invoke the appropriate behavior when duplicate columns are present.
|
56
|
+
def handle_duplicate_columns(cols)
|
57
|
+
message = "One or more duplicate columns present in #{cols.inspect}"
|
58
|
+
|
59
|
+
case duplicate_columns_handler_type(cols)
|
60
|
+
when :raise
|
61
|
+
raise DuplicateColumnError, message
|
62
|
+
when :warn
|
63
|
+
warn message
|
64
|
+
end
|
65
|
+
end
|
66
|
+
|
67
|
+
# Try to find dataset option for on_duplicate_columns. If not present on the dataset,
|
68
|
+
# use the on_duplicate_columns option on the database. If not present on the database,
|
69
|
+
# default to :warn.
|
70
|
+
def duplicate_columns_handler_type(cols)
|
71
|
+
handler = opts.fetch(:on_duplicate_columns){db.opts.fetch(:on_duplicate_columns, :warn)}
|
72
|
+
|
73
|
+
if handler.respond_to?(:call)
|
74
|
+
handler.call(cols)
|
75
|
+
else
|
76
|
+
handler
|
77
|
+
end
|
78
|
+
end
|
79
|
+
end
|
80
|
+
|
81
|
+
# Error which is raised when duplicate columns are present in a dataset which is configured
|
82
|
+
# to :raise on_duplicate_columns.
|
83
|
+
class DuplicateColumnError < Error
|
84
|
+
end
|
85
|
+
|
86
|
+
Dataset.register_extension(:duplicate_columns_handler, Sequel::DuplicateColumnsHandler)
|
87
|
+
end
|
@@ -618,13 +618,15 @@ module Sequel
|
|
618
618
|
# so that each number in the array is the migration version
|
619
619
|
# that will be in affect after the migration is run.
|
620
620
|
def version_numbers
|
621
|
-
|
622
|
-
|
623
|
-
|
624
|
-
|
625
|
-
|
626
|
-
|
627
|
-
|
621
|
+
@version_numbers ||= begin
|
622
|
+
versions = files.
|
623
|
+
compact.
|
624
|
+
map{|f| migration_version_from_file(File.basename(f))}.
|
625
|
+
select{|v| up? ? (v > current && v <= target) : (v <= current && v > target)}.
|
626
|
+
sort
|
627
|
+
versions.reverse! unless up?
|
628
|
+
versions
|
629
|
+
end
|
628
630
|
end
|
629
631
|
end
|
630
632
|
|
@@ -50,6 +50,34 @@
|
|
50
50
|
# See the {schema modification guide}[rdoc-ref:doc/schema_modification.rdoc]
|
51
51
|
# for details on using range type columns in CREATE/ALTER TABLE statements.
|
52
52
|
#
|
53
|
+
# This extension makes it easy to add support for other range types. In
|
54
|
+
# general, you just need to make sure that the subtype is handled and has the
|
55
|
+
# appropriate converter installed in Sequel::Postgres::PG_TYPES or the Database
|
56
|
+
# instance's conversion_procs using the appropriate type OID. For user defined
|
57
|
+
# types, you can do this via:
|
58
|
+
#
|
59
|
+
# DB.conversion_procs[subtype_oid] = lambda{|string| }
|
60
|
+
#
|
61
|
+
# Then you can call
|
62
|
+
# Sequel::Postgres::PGRange::DatabaseMethods#register_range_type
|
63
|
+
# to automatically set up a handler for the range type. So if you
|
64
|
+
# want to support the timerange type (assuming the time type is already
|
65
|
+
# supported):
|
66
|
+
#
|
67
|
+
# DB.register_range_type('timerange')
|
68
|
+
#
|
69
|
+
# You can also register range types on a global basis using
|
70
|
+
# Sequel::Postgres::PGRange.register. In this case, you'll have
|
71
|
+
# to specify the type oids:
|
72
|
+
#
|
73
|
+
# Sequel::Postgres::PG_TYPES[1234] = lambda{|string| }
|
74
|
+
# Sequel::Postgres::PGRange.register('foo', :oid=>4321, :subtype_oid=>1234)
|
75
|
+
#
|
76
|
+
# Both Sequel::Postgres::PGRange::DatabaseMethods#register_range_type
|
77
|
+
# and Sequel::Postgres::PGRange.register support many options to
|
78
|
+
# customize the range type handling. See the Sequel::Postgres::PGRange.register
|
79
|
+
# method documentation.
|
80
|
+
#
|
53
81
|
# This extension integrates with the pg_array extension. If you plan
|
54
82
|
# to use arrays of range types, load the pg_array extension before the
|
55
83
|
# pg_range extension:
|
@@ -94,11 +122,21 @@ module Sequel
|
|
94
122
|
# :subtype_oid :: Should be the PostgreSQL OID for the range's subtype. If given,
|
95
123
|
# automatically sets the :converter option by looking for scalar conversion
|
96
124
|
# proc.
|
125
|
+
# :type_procs :: A hash mapping oids to conversion procs, used for setting the default :converter
|
126
|
+
# for :subtype_oid. Defaults to the global Sequel::Postgres::PG_TYPES.
|
127
|
+
# :typecast_method_map :: The map in which to place the database type string to type symbol mapping.
|
128
|
+
# Defaults to RANGE_TYPES.
|
129
|
+
# :typecast_methods_module :: If given, a module object to add the typecasting method to. Defaults
|
130
|
+
# to DatabaseMethods.
|
97
131
|
#
|
98
132
|
# If a block is given, it is treated as the :converter option.
|
99
133
|
def self.register(db_type, opts=OPTS, &block)
|
100
134
|
db_type = db_type.to_s.dup.freeze
|
101
135
|
|
136
|
+
type_procs = opts[:type_procs] || PG_TYPES
|
137
|
+
mod = opts[:typecast_methods_module] || DatabaseMethods
|
138
|
+
typecast_method_map = opts[:typecast_method_map] || RANGE_TYPES
|
139
|
+
|
102
140
|
if converter = opts[:converter]
|
103
141
|
raise Error, "can't provide both a block and :converter option to register" if block
|
104
142
|
else
|
@@ -106,23 +144,34 @@ module Sequel
|
|
106
144
|
end
|
107
145
|
|
108
146
|
if soid = opts[:subtype_oid]
|
109
|
-
raise Error, "can't provide both a converter and :subtype_oid option to register" if converter
|
110
|
-
raise Error, "no conversion proc for :subtype_oid=>#{soid.inspect} in PG_TYPES" unless converter = PG_TYPES[soid]
|
147
|
+
raise Error, "can't provide both a converter and :subtype_oid option to register" if converter
|
148
|
+
raise Error, "no conversion proc for :subtype_oid=>#{soid.inspect} in PG_TYPES" unless converter = type_procs[soid]
|
111
149
|
end
|
112
150
|
|
113
151
|
parser = Parser.new(db_type, converter)
|
114
152
|
|
115
|
-
|
153
|
+
typecast_method_map[db_type] = db_type.to_sym
|
116
154
|
|
117
|
-
|
155
|
+
define_range_typecast_method(mod, db_type, parser)
|
118
156
|
|
119
157
|
if oid = opts[:oid]
|
120
|
-
|
158
|
+
type_procs[oid] = parser
|
121
159
|
end
|
122
160
|
|
123
161
|
nil
|
124
162
|
end
|
125
163
|
|
164
|
+
# Define a private range typecasting method for the given type that uses
|
165
|
+
# the parser argument to do the type conversion.
|
166
|
+
def self.define_range_typecast_method(mod, type, parser)
|
167
|
+
mod.class_eval do
|
168
|
+
meth = :"typecast_value_#{type}"
|
169
|
+
define_method(meth){|v| typecast_value_pg_range(v, parser)}
|
170
|
+
private meth
|
171
|
+
end
|
172
|
+
end
|
173
|
+
private_class_method :define_range_typecast_method
|
174
|
+
|
126
175
|
# Creates callable objects that convert strings into PGRange instances.
|
127
176
|
class Parser
|
128
177
|
# Regexp that parses the full range of PostgreSQL range type output,
|
@@ -190,6 +239,7 @@ module Sequel
|
|
190
239
|
# and extend the datasets to correctly literalize ruby Range values.
|
191
240
|
def self.extended(db)
|
192
241
|
db.instance_eval do
|
242
|
+
@pg_range_schema_types ||= {}
|
193
243
|
extend_datasets(DatasetMethods)
|
194
244
|
copy_conversion_procs([3904, 3906, 3912, 3926, 3905, 3907, 3913, 3927])
|
195
245
|
[:int4range, :numrange, :tsrange, :tstzrange, :daterange, :int8range].each do |v|
|
@@ -207,14 +257,6 @@ module Sequel
|
|
207
257
|
|
208
258
|
end
|
209
259
|
|
210
|
-
# Define a private range typecasting method for the given type that uses
|
211
|
-
# the parser argument to do the type conversion.
|
212
|
-
def self.define_range_typecast_method(type, parser)
|
213
|
-
meth = :"typecast_value_#{type}"
|
214
|
-
define_method(meth){|v| typecast_value_pg_range(v, parser)}
|
215
|
-
private meth
|
216
|
-
end
|
217
|
-
|
218
260
|
# Handle Range and PGRange values in bound variables
|
219
261
|
def bound_variable_arg(arg, conn)
|
220
262
|
case arg
|
@@ -227,6 +269,23 @@ module Sequel
|
|
227
269
|
end
|
228
270
|
end
|
229
271
|
|
272
|
+
# Register a database specific range type. This can be used to support
|
273
|
+
# different range types per Database. Use of this method does not
|
274
|
+
# affect global state, unlike PGRange.register. See PGRange.register for
|
275
|
+
# possible options.
|
276
|
+
def register_range_type(db_type, opts=OPTS, &block)
|
277
|
+
opts = {:type_procs=>conversion_procs, :typecast_method_map=>@pg_range_schema_types, :typecast_methods_module=>(class << self; self; end)}.merge!(opts)
|
278
|
+
unless (opts.has_key?(:subtype_oid) || block) && opts.has_key?(:oid)
|
279
|
+
range_oid, subtype_oid = from(:pg_range).join(:pg_type, :oid=>:rngtypid).where(:typname=>db_type.to_s).get([:rngtypid, :rngsubtype])
|
280
|
+
opts[:subtype_oid] = subtype_oid unless opts.has_key?(:subtype_oid) || block
|
281
|
+
opts[:oid] = range_oid unless opts.has_key?(:oid)
|
282
|
+
end
|
283
|
+
|
284
|
+
PGRange.register(db_type, opts, &block)
|
285
|
+
@schema_type_classes[:"#{opts[:type_symbol] || db_type}"] = PGRange
|
286
|
+
conversion_procs_updated
|
287
|
+
end
|
288
|
+
|
230
289
|
private
|
231
290
|
|
232
291
|
# Handle arrays of range types in bound variables.
|
@@ -257,7 +316,7 @@ module Sequel
|
|
257
316
|
|
258
317
|
# Recognize the registered database range types.
|
259
318
|
def schema_column_type(db_type)
|
260
|
-
if type = RANGE_TYPES[db_type]
|
319
|
+
if type = @pg_range_schema_types[db_type] || RANGE_TYPES[db_type]
|
261
320
|
type
|
262
321
|
else
|
263
322
|
super
|
data/lib/sequel/model/base.rb
CHANGED
@@ -2367,12 +2367,12 @@ module Sequel
|
|
2367
2367
|
# # => {1=>#<Artist {:id=>1, ...}>,
|
2368
2368
|
# # 2=>#<Artist {:id=>2, ...}>,
|
2369
2369
|
# # ...}
|
2370
|
-
def to_hash(key_column=nil, value_column=nil)
|
2370
|
+
def to_hash(key_column=nil, value_column=nil, opts=OPTS)
|
2371
2371
|
if key_column
|
2372
2372
|
super
|
2373
2373
|
else
|
2374
2374
|
raise(Sequel::Error, "No primary key for model") unless model && (pk = model.primary_key)
|
2375
|
-
super(pk, value_column)
|
2375
|
+
super(pk, value_column, opts)
|
2376
2376
|
end
|
2377
2377
|
end
|
2378
2378
|
|
@@ -39,6 +39,14 @@ module Sequel
|
|
39
39
|
# # WHERE ((id >= 1) AND (id <= 100)))
|
40
40
|
# # AND
|
41
41
|
# # (name < 'M')))))
|
42
|
+
#
|
43
|
+
# For associations that do JOINs, such as many_to_many, note that the datasets returned
|
44
|
+
# by a dataset association method do not do a JOIN by default (they use a subquery that
|
45
|
+
# JOINs). This can cause problems when you are doing a select, order, or filter on a
|
46
|
+
# column in the joined table. In that case, you should use the +:dataset_associations_join+
|
47
|
+
# option in the association, which will make sure the datasets returned by the dataset
|
48
|
+
# association methods also use JOINs, allowing such dataset association methods to work
|
49
|
+
# correctly.
|
42
50
|
#
|
43
51
|
# Usage:
|
44
52
|
#
|
@@ -101,7 +109,19 @@ module Sequel
|
|
101
109
|
else
|
102
110
|
raise Error, "unrecognized association type for association #{name.inspect}: #{r[:type].inspect}"
|
103
111
|
end
|
104
|
-
|
112
|
+
|
113
|
+
ds = r.apply_eager_dataset_changes(ds).unlimited
|
114
|
+
|
115
|
+
if r[:dataset_associations_join]
|
116
|
+
case r[:type]
|
117
|
+
when :many_to_many, :one_through_one
|
118
|
+
ds = ds.join(r[:join_table], r[:right_keys].zip(r.right_primary_keys))
|
119
|
+
when :many_through_many, :one_through_many
|
120
|
+
(r.reverse_edges + [r.final_reverse_edge]).each{|e| ds = ds.join(e[:table], e.fetch(:only_conditions, (Array(e[:left]).zip(Array(e[:right])) + Array(e[:conditions]))), :table_alias=>ds.unused_table_alias(e[:table]), :qualify=>:deep, &e[:block])}
|
121
|
+
end
|
122
|
+
end
|
123
|
+
|
124
|
+
ds
|
105
125
|
end
|
106
126
|
end
|
107
127
|
end
|