sequel 4.25.0 → 4.26.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG +22 -0
- data/README.rdoc +7 -7
- data/doc/release_notes/4.26.0.txt +44 -0
- data/lib/sequel/adapters/shared/access.rb +2 -7
- data/lib/sequel/adapters/shared/cubrid.rb +18 -18
- data/lib/sequel/adapters/shared/db2.rb +5 -0
- data/lib/sequel/adapters/shared/mssql.rb +5 -0
- data/lib/sequel/adapters/shared/mysql.rb +52 -48
- data/lib/sequel/adapters/shared/oracle.rb +11 -0
- data/lib/sequel/adapters/shared/postgres.rb +20 -15
- data/lib/sequel/adapters/shared/sqlanywhere.rb +5 -0
- data/lib/sequel/adapters/tinytds.rb +2 -1
- data/lib/sequel/adapters/utils/emulate_offset_with_row_number.rb +21 -0
- data/lib/sequel/database/schema_generator.rb +9 -0
- data/lib/sequel/database/schema_methods.rb +39 -21
- data/lib/sequel/dataset/actions.rb +2 -2
- data/lib/sequel/dataset/features.rb +5 -0
- data/lib/sequel/dataset/graph.rb +3 -3
- data/lib/sequel/dataset/misc.rb +3 -3
- data/lib/sequel/dataset/prepared_statements.rb +7 -1
- data/lib/sequel/dataset/query.rb +6 -0
- data/lib/sequel/dataset/sql.rb +21 -1
- data/lib/sequel/exceptions.rb +14 -1
- data/lib/sequel/extensions/pg_range.rb +16 -5
- data/lib/sequel/model/base.rb +10 -8
- data/lib/sequel/plugins/association_pks.rb +2 -2
- data/lib/sequel/plugins/single_table_inheritance.rb +1 -1
- data/lib/sequel/version.rb +1 -1
- data/spec/adapters/postgres_spec.rb +17 -0
- data/spec/core/dataset_spec.rb +23 -1
- data/spec/core_extensions_spec.rb +1 -1
- data/spec/extensions/class_table_inheritance_spec.rb +147 -5
- data/spec/extensions/core_refinements_spec.rb +1 -1
- data/spec/extensions/pg_range_ops_spec.rb +1 -1
- data/spec/extensions/pg_range_spec.rb +1 -1
- data/spec/integration/dataset_test.rb +13 -4
- data/spec/integration/plugin_test.rb +1 -0
- data/spec/model/base_spec.rb +7 -0
- metadata +5 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cf4997b7f0149580e9838f7a7c1d0802ce9fdd6b
+  data.tar.gz: 21d723dcf70b74e58a194e71c0d1fb31a6775917
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 851a0cbbd200cffc7280a35168450fcfdb690af48b21c5adf6864289df9fa8eb2b7e7999b396598d0bf3db371e1e721db896bdb869288aa79b8d5db36467b2ed
+  data.tar.gz: 806d66ebf5e196040b42bc023dc2ac13a374037df98e419cc1db69bc2a52c10cd4bae8e6e22c79681ed27256fbba81e109d3a0a593ae62831d3f0cd827c6e2f8
data/CHANGELOG
CHANGED
@@ -1,3 +1,25 @@
+=== 4.26.0 (2015-09-01)
+
+* Make Dataset#== not consider frozen status in determining equality (jeremyevans)
+
+* Support :if_exists option to drop_column on PostgreSQL (jeremyevans)
+
+* Add Dataset#grouping_sets to support GROUP BY GROUPING SETS on PostgreSQL 9.5+, MSSQL 2008+, Oracle, DB2, and SQLAnywhere (jeremyevans)
+
+* Fix handling of Class.new(ModelClass){set_dataset :table} on ruby 1.8 (jeremyevans)
+
+* Use range function constructors instead of casts for known range types in pg_range (jeremyevans) (#1066)
+
+* Make class_table_inheritance plugin work without sti_key (jeremyevans)
+
+* Detect additional disconnect errors when using the tinytds adapter (jeremyevans)
+
+* Make offset emulation without order but with explicit selection handle ambiguous column names (jeremyevans)
+
+* Allow preparing already prepared statements when emulating limits and/or offsets (jeremyevans)
+
+* Have Sequel::NoMatchingRow exceptions record the dataset related to the exception (pedro, jeremyevans) (#1060)
+
 === 4.25.0 (2015-08-01)
 
 * Add Dataset#insert_conflict on PostgreSQL 9.5+, for upsert/insert ignore support using INSERT ON CONFLICT (jeremyevans)
data/README.rdoc
CHANGED
@@ -17,15 +17,15 @@ toolkit for Ruby.
 
 == Resources
 
-
-
-
-
-
-
+Website :: http://sequel.jeremyevans.net
+RDoc Documentation :: http://sequel.jeremyevans.net/rdoc
+Source Code :: https://github.com/jeremyevans/sequel
+Bug tracking (GitHub Issues) :: http://github.com/jeremyevans/sequel/issues
+Discussion Forum (sequel-talk Google Group) :: http://groups.google.com/group/sequel-talk
+IRC Channel (#sequel) :: irc://irc.freenode.net/sequel
 
 If you have questions about how to use Sequel, please ask on the
-Google Group or IRC. Only use the the bug tracker to report
+sequel-talk Google Group or IRC. Only use the the bug tracker to report
 bugs in Sequel, not to ask for help on using Sequel.
 
 To check out the source code:
data/doc/release_notes/4.26.0.txt
ADDED
@@ -0,0 +1,44 @@
+= New Features
+
+* Add Dataset#grouping_sets to support GROUP BY GROUPING SETS on
+  PostgreSQL 9.5+, MSSQL 2008+, Oracle, DB2, and SQLAnywhere:
+
+    DB[:test].group([:type_id, :b], :type_id, []).grouping_sets
+    # SELECT * FROM test
+    # GROUP BY GROUPING SETS((type_id, b), (type_id), ())
+
+* Sequel::NoMatchingRow exceptions raised by Sequel now give access
+  to the dataset that raised the exception via the dataset method.
+  This makes it easier to write generic error handling code.
+
+* Support :if_exists option to drop_column on PostgreSQL:
+
+    DB.drop_column :t, :col, :if_exists=>true
+    ALTER TABLE t DROP COLUMN IF EXISTS col
+
+= Other Improvements
+
+* Make the class_table_inheritance plugin work correctly without an
+  sti_key.  This was broken in a recent refactoring to make class
+  table inheritance support multiple classes for a single table.
+
+* Make Class.new(ModelClass){set_dataset :table} work correctly on
+  ruby 1.8.  This was broken in a refactoring to allow the
+  singular_table_names plugin to work.
+
+* Make offset emulation via ROW_NUMBER better handle ambiguous column
+  names for datasets without an ORDER BY clause, but with an explicit
+  SELECT clause.
+
+* Make pg_range extension use PostgreSQL range function constructors
+  instead of casting string literals to the appropriate range type,
+  if the range type is known.  This allows arbitrary expressions to
+  be used inside ranges, such as CURRENT_TIMESTAMP in timestamp
+  ranges.
+
+* Make Dataset#== not consider frozen status.
+
+* Allow Dataset#prepare on already prepared statements in situations
+  where determining the SQL for a prepared statement requires it.
+
+* Detect additional disconnect errors when using the tinytds adapter.
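A minimal sketch (not part of the diff) of the generic error handling that the
new Sequel::NoMatchingRow#dataset accessor described above enables. The table
and column names are illustrative, and the example assumes the sqlite3 gem is
available for the in-memory database:

    require 'sequel'

    DB = Sequel.sqlite # in-memory database, just for the sketch
    DB.create_table(:items){primary_key :id; String :name}

    begin
      DB[:items].where(:name=>'missing').first!
    rescue Sequel::NoMatchingRow => e
      # e.dataset is the dataset whose #first! found no rows, so a generic
      # handler can log or inspect the query that failed.
      puts "No row matched: #{e.dataset.sql}"
    end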
data/lib/sequel/adapters/shared/access.rb
CHANGED
@@ -29,13 +29,8 @@ module Sequel
 
       private
 
-      def
-
-        when :set_column_type
-          "ALTER COLUMN #{quote_identifier(op[:name])} #{type_literal(op)}"
-        else
-          super
-        end
+      def alter_table_set_column_type_sql(table, op)
+        "ALTER COLUMN #{quote_identifier(op[:name])} #{type_literal(op)}"
       end
 
       # Access doesn't support CREATE TABLE AS, it only supports SELECT INTO.
data/lib/sequel/adapters/shared/cubrid.rb
CHANGED
@@ -81,24 +81,24 @@ module Sequel
         map{|c| m.call(c)}
       end
 
-      def
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      def alter_table_rename_column_sql(table, op)
+        "RENAME COLUMN #{quote_identifier(op[:name])} AS #{quote_identifier(op[:new_name])}"
+      end
+
+      def alter_table_change_column_sql(table, op)
+        o = op[:op]
+        opts = schema(table).find{|x| x.first == op[:name]}
+        opts = opts ? opts.last.dup : {}
+        opts[:name] = o == :rename_column ? op[:new_name] : op[:name]
+        opts[:type] = o == :set_column_type ? op[:type] : opts[:db_type]
+        opts[:null] = o == :set_column_null ? op[:null] : opts[:allow_null]
+        opts[:default] = o == :set_column_default ? op[:default] : opts[:ruby_default]
+        opts.delete(:default) if opts[:default] == nil
+        "CHANGE COLUMN #{quote_identifier(op[:name])} #{column_definition_sql(op.merge(opts))}"
+      end
+      alias alter_table_set_column_type_sql alter_table_change_column_sql
+      alias alter_table_set_column_null_sql alter_table_change_column_sql
+      alias alter_table_set_column_default_sql alter_table_change_column_sql
 
       def alter_table_sql(table, op)
         case op[:op]
data/lib/sequel/adapters/shared/mssql.rb
CHANGED
@@ -729,6 +729,11 @@ module Sequel
         is_2005_or_later?
       end
 
+      # MSSQL 2005+ supports GROUPING SETS
+      def supports_grouping_sets?
+        is_2008_or_later?
+      end
+
       # MSSQL supports insert_select via the OUTPUT clause.
       def supports_insert_select?
         supports_output_clause? && !opts[:disable_insert_output]
@@ -185,60 +185,64 @@ module Sequel
|
|
185
185
|
|
186
186
|
private
|
187
187
|
|
188
|
-
|
189
|
-
|
190
|
-
|
191
|
-
|
192
|
-
|
193
|
-
|
194
|
-
|
195
|
-
|
196
|
-
sql << ", ADD "
|
197
|
-
if constraint_name = op.delete(:foreign_key_constraint_name)
|
198
|
-
sql << "CONSTRAINT #{quote_identifier(constraint_name)} "
|
199
|
-
end
|
200
|
-
sql << "FOREIGN KEY (#{quote_identifier(op[:name])})#{column_references_sql(op)}"
|
201
|
-
else
|
202
|
-
super
|
203
|
-
end
|
204
|
-
when :rename_column, :set_column_type, :set_column_null, :set_column_default
|
205
|
-
o = op[:op]
|
206
|
-
opts = schema(table).find{|x| x.first == op[:name]}
|
207
|
-
opts = opts ? opts.last.dup : {}
|
208
|
-
opts[:name] = o == :rename_column ? op[:new_name] : op[:name]
|
209
|
-
opts[:type] = o == :set_column_type ? op[:type] : opts[:db_type]
|
210
|
-
opts[:null] = o == :set_column_null ? op[:null] : opts[:allow_null]
|
211
|
-
opts[:default] = o == :set_column_default ? op[:default] : opts[:ruby_default]
|
212
|
-
opts.delete(:default) if opts[:default] == nil
|
213
|
-
opts.delete(:primary_key)
|
214
|
-
unless op[:type] || opts[:type]
|
215
|
-
raise Error, "cannot determine database type to use for CHANGE COLUMN operation"
|
216
|
-
end
|
217
|
-
opts = op.merge(opts)
|
218
|
-
if op.has_key?(:auto_increment)
|
219
|
-
opts[:auto_increment] = op[:auto_increment]
|
188
|
+
def alter_table_add_column_sql(table, op)
|
189
|
+
if related = op.delete(:table)
|
190
|
+
sql = super
|
191
|
+
op[:table] = related
|
192
|
+
op[:key] ||= primary_key_from_schema(related)
|
193
|
+
sql << ", ADD "
|
194
|
+
if constraint_name = op.delete(:foreign_key_constraint_name)
|
195
|
+
sql << "CONSTRAINT #{quote_identifier(constraint_name)} "
|
220
196
|
end
|
221
|
-
"
|
222
|
-
when :drop_constraint
|
223
|
-
case op[:type]
|
224
|
-
when :primary_key
|
225
|
-
"DROP PRIMARY KEY"
|
226
|
-
when :foreign_key
|
227
|
-
name = op[:name] || foreign_key_name(table, op[:columns])
|
228
|
-
"DROP FOREIGN KEY #{quote_identifier(name)}"
|
229
|
-
when :unique
|
230
|
-
"DROP INDEX #{quote_identifier(op[:name])}"
|
231
|
-
end
|
232
|
-
when :add_constraint
|
233
|
-
if op[:type] == :foreign_key
|
234
|
-
op[:key] ||= primary_key_from_schema(op[:table])
|
235
|
-
end
|
236
|
-
super
|
197
|
+
sql << "FOREIGN KEY (#{quote_identifier(op[:name])})#{column_references_sql(op)}"
|
237
198
|
else
|
238
199
|
super
|
239
200
|
end
|
240
201
|
end
|
241
202
|
|
203
|
+
def alter_table_change_column_sql(table, op)
|
204
|
+
o = op[:op]
|
205
|
+
opts = schema(table).find{|x| x.first == op[:name]}
|
206
|
+
opts = opts ? opts.last.dup : {}
|
207
|
+
opts[:name] = o == :rename_column ? op[:new_name] : op[:name]
|
208
|
+
opts[:type] = o == :set_column_type ? op[:type] : opts[:db_type]
|
209
|
+
opts[:null] = o == :set_column_null ? op[:null] : opts[:allow_null]
|
210
|
+
opts[:default] = o == :set_column_default ? op[:default] : opts[:ruby_default]
|
211
|
+
opts.delete(:default) if opts[:default] == nil
|
212
|
+
opts.delete(:primary_key)
|
213
|
+
unless op[:type] || opts[:type]
|
214
|
+
raise Error, "cannot determine database type to use for CHANGE COLUMN operation"
|
215
|
+
end
|
216
|
+
opts = op.merge(opts)
|
217
|
+
if op.has_key?(:auto_increment)
|
218
|
+
opts[:auto_increment] = op[:auto_increment]
|
219
|
+
end
|
220
|
+
"CHANGE COLUMN #{quote_identifier(op[:name])} #{column_definition_sql(opts)}"
|
221
|
+
end
|
222
|
+
alias alter_table_rename_column_sql alter_table_change_column_sql
|
223
|
+
alias alter_table_set_column_type_sql alter_table_change_column_sql
|
224
|
+
alias alter_table_set_column_null_sql alter_table_change_column_sql
|
225
|
+
alias alter_table_set_column_default_sql alter_table_change_column_sql
|
226
|
+
|
227
|
+
def alter_table_add_constraint_sql(table, op)
|
228
|
+
if op[:type] == :foreign_key
|
229
|
+
op[:key] ||= primary_key_from_schema(op[:table])
|
230
|
+
end
|
231
|
+
super
|
232
|
+
end
|
233
|
+
|
234
|
+
def alter_table_drop_constraint_sql(table, op)
|
235
|
+
case op[:type]
|
236
|
+
when :primary_key
|
237
|
+
"DROP PRIMARY KEY"
|
238
|
+
when :foreign_key
|
239
|
+
name = op[:name] || foreign_key_name(table, op[:columns])
|
240
|
+
"DROP FOREIGN KEY #{quote_identifier(name)}"
|
241
|
+
when :unique
|
242
|
+
"DROP INDEX #{quote_identifier(op[:name])}"
|
243
|
+
end
|
244
|
+
end
|
245
|
+
|
242
246
|
# MySQL server requires table names when dropping indexes.
|
243
247
|
def alter_table_sql(table, op)
|
244
248
|
case op[:op]
|
@@ -393,6 +393,11 @@ module Sequel
|
|
393
393
|
true
|
394
394
|
end
|
395
395
|
|
396
|
+
# Oracle supports GROUPING SETS
|
397
|
+
def supports_grouping_sets?
|
398
|
+
true
|
399
|
+
end
|
400
|
+
|
396
401
|
# Oracle does not support INTERSECT ALL or EXCEPT ALL
|
397
402
|
def supports_intersect_except_all?
|
398
403
|
false
|
@@ -435,6 +440,12 @@ module Sequel
|
|
435
440
|
|
436
441
|
private
|
437
442
|
|
443
|
+
# Allow preparing prepared statements, since determining the prepared sql to use for
|
444
|
+
# a prepared statement requires calling prepare on that statement.
|
445
|
+
def allow_preparing_prepared_statements?
|
446
|
+
true
|
447
|
+
end
|
448
|
+
|
438
449
|
# Oracle doesn't support the use of AS when aliasing a dataset. It doesn't require
|
439
450
|
# the use of AS anywhere, so this disables it in all cases.
|
440
451
|
def as_sql_append(sql, aliaz, column_aliases=nil)
|
@@ -579,22 +579,22 @@ module Sequel
|
|
579
579
|
Postgres::AlterTableGenerator
|
580
580
|
end
|
581
581
|
|
582
|
-
|
583
|
-
|
584
|
-
|
585
|
-
|
586
|
-
s
|
587
|
-
|
588
|
-
using = Sequel::LiteralString.new(using) if using.is_a?(String)
|
589
|
-
s << ' USING '
|
590
|
-
s << literal(using)
|
591
|
-
end
|
592
|
-
s
|
593
|
-
when :validate_constraint
|
594
|
-
"VALIDATE CONSTRAINT #{quote_identifier(op[:name])}"
|
595
|
-
else
|
596
|
-
super
|
582
|
+
def alter_table_set_column_type_sql(table, op)
|
583
|
+
s = super
|
584
|
+
if using = op[:using]
|
585
|
+
using = Sequel::LiteralString.new(using) if using.is_a?(String)
|
586
|
+
s << ' USING '
|
587
|
+
s << literal(using)
|
597
588
|
end
|
589
|
+
s
|
590
|
+
end
|
591
|
+
|
592
|
+
def alter_table_drop_column_sql(table, op)
|
593
|
+
"DROP COLUMN #{'IF EXISTS ' if op[:if_exists]}#{quote_identifier(op[:name])}#{' CASCADE' if op[:cascade]}"
|
594
|
+
end
|
595
|
+
|
596
|
+
def alter_table_validate_constraint_sql(table, op)
|
597
|
+
"VALIDATE CONSTRAINT #{quote_identifier(op[:name])}"
|
598
598
|
end
|
599
599
|
|
600
600
|
# If the :synchronous option is given and non-nil, set synchronous_commit
|
@@ -1462,6 +1462,11 @@ module Sequel
|
|
1462
1462
|
server_version >= 90500
|
1463
1463
|
end
|
1464
1464
|
|
1465
|
+
# PostgreSQL 9.5+ supports GROUPING SETS
|
1466
|
+
def supports_grouping_sets?
|
1467
|
+
server_version >= 90500
|
1468
|
+
end
|
1469
|
+
|
1465
1470
|
# True unless insert returning has been disabled for this dataset.
|
1466
1471
|
def supports_insert_select?
|
1467
1472
|
!@opts[:disable_insert_returning]
|
data/lib/sequel/adapters/tinytds.rb
CHANGED
@@ -129,9 +129,10 @@ module Sequel
       end
     end
 
+    TINYTDS_DISCONNECT_ERRORS = /\A(Attempt to initiate a new Adaptive Server operation with results pending|The request failed to run because the batch is aborted, this can be caused by abort signal sent from client)/
     # Return true if the :conn argument is present and not active.
     def disconnect_error?(e, opts)
-      super || (opts[:conn] && !opts[:conn].active?)
+      super || (opts[:conn] && !opts[:conn].active?) || ((e.is_a?(::TinyTds::Error) && TINYTDS_DISCONNECT_ERRORS.match(e.message)))
     end
 
     # Dispose of any possible results of execution.
@@ -51,9 +51,30 @@ module Sequel
|
|
51
51
|
|
52
52
|
private
|
53
53
|
|
54
|
+
# Allow preparing prepared statements, since determining the prepared sql to use for
|
55
|
+
# a prepared statement requires calling prepare on that statement.
|
56
|
+
def allow_preparing_prepared_statements?
|
57
|
+
true
|
58
|
+
end
|
59
|
+
|
54
60
|
# The default order to use for datasets with offsets, if no order is defined.
|
55
61
|
# By default, orders by all of the columns in the dataset.
|
56
62
|
def default_offset_order
|
63
|
+
if (cols = opts[:select])
|
64
|
+
cols.each do |c|
|
65
|
+
case c
|
66
|
+
when Symbol
|
67
|
+
return [split_alias(c).first]
|
68
|
+
when SQL::Identifier, SQL::QualifiedIdentifier
|
69
|
+
return [c]
|
70
|
+
when SQL::AliasedExpression
|
71
|
+
case c.expression
|
72
|
+
when Symbol, SQL::Identifier, SQL::QualifiedIdentifier
|
73
|
+
return [c.expression]
|
74
|
+
end
|
75
|
+
end
|
76
|
+
end
|
77
|
+
end
|
57
78
|
clone(:append_sql=>'').columns
|
58
79
|
end
|
59
80
|
|
data/lib/sequel/database/schema_generator.rb
CHANGED
@@ -407,6 +407,15 @@ module Sequel
     #
     #   drop_column(:artist_id) # DROP COLUMN artist_id
     #   drop_column(:artist_id, :cascade=>true) # DROP COLUMN artist_id CASCADE
+    #
+    # Options:
+    #
+    # :cascade :: CASCADE the operation, dropping other objects that depend on
+    #             the dropped column.
+    #
+    # PostgreSQL specific options:
+    # :if_exists :: Use IF EXISTS, so no error is raised if the column does not
+    #               exist.
     def drop_column(name, opts=OPTS)
       @operations << {:op => :drop_column, :name => name}.merge!(opts)
     end
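A short usage sketch (not part of the diff) of the new PostgreSQL-only
:if_exists option documented above; the table and column names are
illustrative:

    DB.drop_column :artists, :alt_name, :if_exists=>true
    # ALTER TABLE artists DROP COLUMN IF EXISTS alt_name

    # Safe to run repeatedly, e.g. in an idempotent migration step:
    DB.alter_table(:artists){drop_column :alt_name, :if_exists=>true}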
@@ -430,32 +430,50 @@ module Sequel
|
|
430
430
|
|
431
431
|
# SQL fragment for given alter table operation.
|
432
432
|
def alter_table_op_sql(table, op)
|
433
|
-
|
434
|
-
|
435
|
-
|
436
|
-
"ADD COLUMN #{column_definition_sql(op)}"
|
437
|
-
when :drop_column
|
438
|
-
"DROP COLUMN #{quoted_name}#{' CASCADE' if op[:cascade]}"
|
439
|
-
when :rename_column
|
440
|
-
"RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}"
|
441
|
-
when :set_column_type
|
442
|
-
"ALTER COLUMN #{quoted_name} TYPE #{type_literal(op)}"
|
443
|
-
when :set_column_default
|
444
|
-
"ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}"
|
445
|
-
when :set_column_null
|
446
|
-
"ALTER COLUMN #{quoted_name} #{op[:null] ? 'DROP' : 'SET'} NOT NULL"
|
447
|
-
when :add_constraint
|
448
|
-
"ADD #{constraint_definition_sql(op)}"
|
449
|
-
when :drop_constraint
|
450
|
-
if op[:type] == :foreign_key
|
451
|
-
quoted_name ||= quote_identifier(foreign_key_name(table, op[:columns]))
|
452
|
-
end
|
453
|
-
"DROP CONSTRAINT #{quoted_name}#{' CASCADE' if op[:cascade]}"
|
433
|
+
meth = "alter_table_#{op[:op]}_sql"
|
434
|
+
if respond_to?(meth, true)
|
435
|
+
send(meth, table, op)
|
454
436
|
else
|
455
437
|
raise Error, "Unsupported ALTER TABLE operation: #{op[:op]}"
|
456
438
|
end
|
457
439
|
end
|
458
440
|
|
441
|
+
def alter_table_add_column_sql(table, op)
|
442
|
+
"ADD COLUMN #{column_definition_sql(op)}"
|
443
|
+
end
|
444
|
+
|
445
|
+
def alter_table_drop_column_sql(table, op)
|
446
|
+
"DROP COLUMN #{quote_identifier(op[:name])}#{' CASCADE' if op[:cascade]}"
|
447
|
+
end
|
448
|
+
|
449
|
+
def alter_table_rename_column_sql(table, op)
|
450
|
+
"RENAME COLUMN #{quote_identifier(op[:name])} TO #{quote_identifier(op[:new_name])}"
|
451
|
+
end
|
452
|
+
|
453
|
+
def alter_table_set_column_type_sql(table, op)
|
454
|
+
"ALTER COLUMN #{quote_identifier(op[:name])} TYPE #{type_literal(op)}"
|
455
|
+
end
|
456
|
+
|
457
|
+
def alter_table_set_column_default_sql(table, op)
|
458
|
+
"ALTER COLUMN #{quote_identifier(op[:name])} SET DEFAULT #{literal(op[:default])}"
|
459
|
+
end
|
460
|
+
|
461
|
+
def alter_table_set_column_null_sql(table, op)
|
462
|
+
"ALTER COLUMN #{quote_identifier(op[:name])} #{op[:null] ? 'DROP' : 'SET'} NOT NULL"
|
463
|
+
end
|
464
|
+
|
465
|
+
def alter_table_add_constraint_sql(table, op)
|
466
|
+
"ADD #{constraint_definition_sql(op)}"
|
467
|
+
end
|
468
|
+
|
469
|
+
def alter_table_drop_constraint_sql(table, op)
|
470
|
+
quoted_name = quote_identifier(op[:name]) if op[:name]
|
471
|
+
if op[:type] == :foreign_key
|
472
|
+
quoted_name ||= quote_identifier(foreign_key_name(table, op[:columns]))
|
473
|
+
end
|
474
|
+
"DROP CONSTRAINT #{quoted_name}#{' CASCADE' if op[:cascade]}"
|
475
|
+
end
|
476
|
+
|
459
477
|
# The SQL to execute to modify the DDL for the given table name. op
|
460
478
|
# should be one of the operations returned by the AlterTableGenerator.
|
461
479
|
def alter_table_sql(table, op)
|
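The refactored alter_table_op_sql above dispatches on the operation name, so
each ALTER TABLE operation maps to one overridable private method. A sketch of
what this enables, using a hypothetical adapter module (MyAdapter and the
RESTRICT clause are illustrative, not from the diff) -- an adapter can now
override a single alter_table_*_sql method instead of reimplementing the whole
case statement:

    module Sequel
      module MyAdapter
        module DatabaseMethods
          private

          # Invoked by the generic alter_table_op_sql dispatcher when
          # op[:op] == :drop_column for this hypothetical adapter.
          def alter_table_drop_column_sql(table, op)
            "DROP COLUMN #{quote_identifier(op[:name])} RESTRICT"
          end
        end
      end
    end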
@@ -203,7 +203,7 @@ module Sequel
|
|
203
203
|
# Calls first. If first returns nil (signaling that no
|
204
204
|
# row matches), raise a Sequel::NoMatchingRow exception.
|
205
205
|
def first!(*args, &block)
|
206
|
-
first(*args, &block) || raise(Sequel::NoMatchingRow)
|
206
|
+
first(*args, &block) || raise(Sequel::NoMatchingRow.new(self))
|
207
207
|
end
|
208
208
|
|
209
209
|
# Return the column value for the first matching record in the dataset.
|
@@ -268,7 +268,7 @@ module Sequel
|
|
268
268
|
# :commit_every :: Open a new transaction for every given number of records.
|
269
269
|
# For example, if you provide a value of 50, will commit
|
270
270
|
# after every 50 records.
|
271
|
-
# :return :: When
|
271
|
+
# :return :: When this is set to :primary_key, returns an array of
|
272
272
|
# autoincremented primary key values for the rows inserted.
|
273
273
|
# :server :: Set the server/shard to use for the transaction and insert
|
274
274
|
# queries.
|
@@ -76,6 +76,11 @@ module Sequel
|
|
76
76
|
false
|
77
77
|
end
|
78
78
|
|
79
|
+
# Whether the dataset supports GROUPING SETS with GROUP BY.
|
80
|
+
def supports_grouping_sets?
|
81
|
+
false
|
82
|
+
end
|
83
|
+
|
79
84
|
# Whether this dataset supports the +insert_select+ method for returning all columns values
|
80
85
|
# directly from an insert query.
|
81
86
|
def supports_insert_select?
|
data/lib/sequel/dataset/graph.rb
CHANGED
@@ -2,9 +2,9 @@ module Sequel
   class Dataset
     # ---------------------
     # :section: 5 - Methods related to dataset graphing
-    # Dataset graphing
-    #
-    #
+    # Dataset graphing automatically creates unique aliases columns in join
+    # tables that overlap with already selected column aliases.
+    # All of these methods return modified copies of the receiver.
     # ---------------------
 
     # Adds the given graph aliases to the list of graph aliases to use,
data/lib/sequel/dataset/misc.rb
CHANGED
@@ -50,7 +50,7 @@ module Sequel
     # Similar to #clone, but returns an unfrozen clone if the receiver is frozen.
     def dup
       o = clone
-      o.
+      o.instance_variable_set(:@frozen, false) if frozen?
       o
     end
 
@@ -74,13 +74,13 @@ module Sequel
 
     # Sets the frozen flag on the dataset, so you can't modify it. Returns the receiver.
     def freeze
-      @
+      @frozen = true
      self
     end
 
     # Whether the object is frozen.
     def frozen?
-      @
+      @frozen == true
     end
 
     # Alias of +first_source_alias+
@@ -100,7 +100,8 @@ module Sequel
|
|
100
100
|
# Raise an error if attempting to call prepare on an already
|
101
101
|
# prepared statement.
|
102
102
|
def prepare(*)
|
103
|
-
raise Error, "cannot prepare an already prepared statement"
|
103
|
+
raise Error, "cannot prepare an already prepared statement" unless allow_preparing_prepared_statements?
|
104
|
+
super
|
104
105
|
end
|
105
106
|
|
106
107
|
# Send the columns to the original dataset, as calling it
|
@@ -309,6 +310,11 @@ module Sequel
|
|
309
310
|
|
310
311
|
private
|
311
312
|
|
313
|
+
# Don't allow preparing prepared statements by default.
|
314
|
+
def allow_preparing_prepared_statements?
|
315
|
+
false
|
316
|
+
end
|
317
|
+
|
312
318
|
# The argument placeholder. Most databases used unnumbered
|
313
319
|
# arguments with question marks, so that is the default.
|
314
320
|
def prepared_arg_placeholder
|
data/lib/sequel/dataset/query.rb
CHANGED
@@ -337,6 +337,12 @@ module Sequel
       clone(:group_options=>:rollup)
     end
 
+    # Adds the appropriate GROUPING SETS syntax to GROUP BY.
+    def grouping_sets
+      raise Error, "GROUP BY GROUPING SETS not supported on #{db.database_type}" unless supports_grouping_sets?
+      clone(:group_options=>:"grouping sets")
+    end
+
     # Returns a copy of the dataset with the HAVING conditions changed. See #where for argument types.
     #
     #   DB[:items].group(:sum).having(:sum=>10)
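A brief usage sketch (not part of the diff) for the new Dataset#grouping_sets,
mirroring the example in the 4.26.0 release notes; the table and column names
are illustrative:

    ds = DB[:test].group([:type_id, :b], :type_id, []).grouping_sets
    ds.sql
    # SELECT * FROM test GROUP BY GROUPING SETS((type_id, b), (type_id), ())

    # Each argument to #group becomes one grouping set; an empty array
    # produces the grand-total set ().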
data/lib/sequel/dataset/sql.rb
CHANGED
@@ -271,6 +271,7 @@ module Sequel
|
|
271
271
|
DOUBLE_APOS = "''".freeze
|
272
272
|
DOUBLE_QUOTE = '""'.freeze
|
273
273
|
EQUAL = ' = '.freeze
|
274
|
+
EMPTY_PARENS = '()'.freeze
|
274
275
|
ESCAPE = " ESCAPE ".freeze
|
275
276
|
EXTRACT = 'extract('.freeze
|
276
277
|
EXISTS = ['EXISTS '.freeze].freeze
|
@@ -1008,6 +1009,21 @@ module Sequel
|
|
1008
1009
|
end
|
1009
1010
|
end
|
1010
1011
|
|
1012
|
+
# Append literalization of array of grouping elements to SQL string.
|
1013
|
+
def grouping_element_list_append(sql, columns)
|
1014
|
+
c = false
|
1015
|
+
co = COMMA
|
1016
|
+
columns.each do |col|
|
1017
|
+
sql << co if c
|
1018
|
+
if col.is_a?(Array) && col.empty?
|
1019
|
+
sql << EMPTY_PARENS
|
1020
|
+
else
|
1021
|
+
literal_append(sql, Array(col))
|
1022
|
+
end
|
1023
|
+
c ||= true
|
1024
|
+
end
|
1025
|
+
end
|
1026
|
+
|
1011
1027
|
# An expression for how to handle an empty array lookup.
|
1012
1028
|
def empty_array_value(op, cols)
|
1013
1029
|
{1 => ((op == :IN) ? 0 : 1)}
|
@@ -1352,7 +1368,11 @@ module Sequel
|
|
1352
1368
|
if group = @opts[:group]
|
1353
1369
|
sql << GROUP_BY
|
1354
1370
|
if go = @opts[:group_options]
|
1355
|
-
if
|
1371
|
+
if go == :"grouping sets"
|
1372
|
+
sql << go.to_s.upcase << PAREN_OPEN
|
1373
|
+
grouping_element_list_append(sql, group)
|
1374
|
+
sql << PAREN_CLOSE
|
1375
|
+
elsif uses_with_rollup?
|
1356
1376
|
expression_list_append(sql, group)
|
1357
1377
|
sql << SPACE_WITH << go.to_s.upcase
|
1358
1378
|
else
|
data/lib/sequel/exceptions.rb
CHANGED
@@ -52,7 +52,20 @@ module Sequel
 
   # Error raised when the user requests a record via the first! or similar
   # method, and the dataset does not yield any rows.
-  NoMatchingRow
+  class NoMatchingRow < Error
+    # The dataset that raised this NoMatchingRow exception.
+    attr_accessor :dataset
+
+    # If the first argument is a Sequel::Dataset, set the dataset related to
+    # the exception to that argument, instead of assuming it is the exception message.
+    def initialize(msg=nil)
+      if msg.is_a?(Sequel::Dataset)
+        @dataset = msg
+        msg = nil
+      end
+      super
+    end
+  end
 
   # Error raised when the connection pool cannot acquire a database connection
   # before the timeout.
@@ -71,6 +71,7 @@ module Sequel
|
|
71
71
|
|
72
72
|
EMPTY = 'empty'.freeze
|
73
73
|
EMPTY_STRING = ''.freeze
|
74
|
+
COMMA = ','.freeze
|
74
75
|
QUOTED_EMPTY_STRING = '""'.freeze
|
75
76
|
OPEN_PAREN = "(".freeze
|
76
77
|
CLOSE_PAREN = ")".freeze
|
@@ -91,8 +92,8 @@ module Sequel
|
|
91
92
|
# :oid :: The PostgreSQL OID for the range type. This is used by the Sequel postgres adapter
|
92
93
|
# to set up automatic type conversion on retrieval from the database.
|
93
94
|
# :subtype_oid :: Should be the PostgreSQL OID for the range's subtype. If given,
|
94
|
-
#
|
95
|
-
#
|
95
|
+
# automatically sets the :converter option by looking for scalar conversion
|
96
|
+
# proc.
|
96
97
|
#
|
97
98
|
# If a block is given, it is treated as the :converter option.
|
98
99
|
def self.register(db_type, opts=OPTS, &block)
|
@@ -414,9 +415,19 @@ module Sequel
|
|
414
415
|
|
415
416
|
# Append a literalize version of the receiver to the sql.
|
416
417
|
def sql_literal_append(ds, sql)
|
417
|
-
|
418
|
-
|
419
|
-
sql
|
418
|
+
if (s = @db_type) && !empty?
|
419
|
+
sql << s.to_s << OPEN_PAREN
|
420
|
+
ds.literal_append(sql, self.begin)
|
421
|
+
sql << COMMA
|
422
|
+
ds.literal_append(sql, self.end)
|
423
|
+
sql << COMMA
|
424
|
+
ds.literal_append(sql, "#{exclude_begin? ? OPEN_PAREN : OPEN_BRACKET}#{exclude_end? ? CLOSE_PAREN : CLOSE_BRACKET}")
|
425
|
+
sql << CLOSE_PAREN
|
426
|
+
else
|
427
|
+
ds.literal_append(sql, unquoted_literal(ds))
|
428
|
+
if s
|
429
|
+
sql << CAST << s.to_s
|
430
|
+
end
|
420
431
|
end
|
421
432
|
end
|
422
433
|
|
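A small sketch (not part of the diff) of how the pg_range change above shows
up in generated SQL, matching the updated spec expectations; it assumes the
pg_range extension is loaded and a range with a known database type such as
int4range or tstzrange:

    Sequel.extension :pg_range          # provides Sequel.pg_range
    DB.literal(Sequel.pg_range(1..2, :int4range))
    # => "int4range(1,2,'[]')"   (previously a string literal cast to the type)

    # Because the bounds are now passed to the range constructor function,
    # arbitrary expressions (e.g. CURRENT_TIMESTAMP) can be used as bounds.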
data/lib/sequel/model/base.rb
CHANGED
@@ -468,7 +468,7 @@ module Sequel
|
|
468
468
|
# An alias for calling first! on the model's dataset, but with
|
469
469
|
# optimized handling of the single argument case.
|
470
470
|
def first!(*args, &block)
|
471
|
-
first(*args, &block) || raise(Sequel::NoMatchingRow)
|
471
|
+
first(*args, &block) || raise(Sequel::NoMatchingRow.new(dataset))
|
472
472
|
end
|
473
473
|
|
474
474
|
# Clear the setter_methods cache when a module is included, as it
|
@@ -509,11 +509,13 @@ module Sequel
|
|
509
509
|
subclass.instance_variable_set(iv, sup_class_value)
|
510
510
|
end
|
511
511
|
|
512
|
-
|
513
|
-
|
514
|
-
|
515
|
-
|
516
|
-
|
512
|
+
unless ivs.include?("@dataset")
|
513
|
+
if @dataset && self != Model
|
514
|
+
subclass.set_dataset(@dataset.clone, :inherited=>true) rescue nil
|
515
|
+
elsif (n = subclass.name) && !n.to_s.empty?
|
516
|
+
db
|
517
|
+
subclass.set_dataset(subclass.implicit_table_name) rescue nil
|
518
|
+
end
|
517
519
|
end
|
518
520
|
end
|
519
521
|
|
@@ -783,7 +785,7 @@ module Sequel
|
|
783
785
|
|
784
786
|
# Return the model instance with the primary key, or raise NoMatchingRow if there is no matching record.
|
785
787
|
def with_pk!(pk)
|
786
|
-
with_pk(pk) || raise(NoMatchingRow)
|
788
|
+
with_pk(pk) || raise(NoMatchingRow.new(dataset))
|
787
789
|
end
|
788
790
|
|
789
791
|
# Add model methods that call dataset methods
|
@@ -2346,7 +2348,7 @@ module Sequel
|
|
2346
2348
|
# Same as with_pk, but raises NoMatchingRow instead of returning nil if no
|
2347
2349
|
# row matches.
|
2348
2350
|
def with_pk!(pk)
|
2349
|
-
with_pk(pk) || raise(NoMatchingRow)
|
2351
|
+
with_pk(pk) || raise(NoMatchingRow.new(self))
|
2350
2352
|
end
|
2351
2353
|
end
|
2352
2354
|
|
@@ -20,10 +20,10 @@ module Sequel
|
|
20
20
|
# not call any callbacks. If you have any association callbacks,
|
21
21
|
# you probably should not use the setter methods.
|
22
22
|
#
|
23
|
-
# If an association uses the :
|
23
|
+
# If an association uses the :delay_pks option, you can set the associated
|
24
24
|
# pks for new objects, and the setting will not be persisted until after the
|
25
25
|
# object has been created in the database. Additionally, if an association
|
26
|
-
# uses the :
|
26
|
+
# uses the :delay_pks=>:all option, you can set the associated pks for existing
|
27
27
|
# objects, and the setting will not be persisted until after the object has
|
28
28
|
# been saved.
|
29
29
|
#
|
@@ -211,7 +211,7 @@ module Sequel
|
|
211
211
|
|
212
212
|
# Set the sti_key column based on the sti_key_map.
|
213
213
|
def _before_validation
|
214
|
-
if new? && !self[model.sti_key]
|
214
|
+
if new? && model.sti_key && !self[model.sti_key]
|
215
215
|
set_column_value("#{model.sti_key}=", model.sti_key_chooser.call(self))
|
216
216
|
end
|
217
217
|
super
|
data/lib/sequel/version.rb
CHANGED
@@ -3,7 +3,7 @@ module Sequel
   MAJOR = 4
   # The minor version of Sequel. Bumped for every non-patch level
   # release, generally around once a month.
-  MINOR =
+  MINOR = 26
   # The tiny version of Sequel. Usually 0, only bumped for bugfix
   # releases that fix regressions from previous versions.
   TINY = 0
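With the bumped constant, the running library reports the new release; a
trivial check using the standard version helpers:

    require 'sequel'
    Sequel.version   # => "4.26.0"
    Sequel::MINOR    # => 26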
@@ -107,6 +107,14 @@ describe "PostgreSQL", '#create_table' do
|
|
107
107
|
end.must_raise(Sequel::Error, "can't provide both :temp and :unlogged to create_table")
|
108
108
|
end
|
109
109
|
|
110
|
+
it "should support :if_exists option to drop_column" do
|
111
|
+
@db.create_table(:tmp_dolls){Integer :a; Integer :b}
|
112
|
+
2.times do
|
113
|
+
@db.drop_column :tmp_dolls, :b, :if_exists=>true
|
114
|
+
@db[:tmp_dolls].columns.must_equal [:a]
|
115
|
+
end
|
116
|
+
end if DB.server_version >= 90000
|
117
|
+
|
110
118
|
it "should support pg_loose_count extension" do
|
111
119
|
@db.extension :pg_loose_count
|
112
120
|
@db.create_table(:tmp_dolls){text :name}
|
@@ -3145,6 +3153,15 @@ describe 'PostgreSQL range types' do
|
|
3145
3153
|
v.each{|k,v1| v1.must_be :==, @ra[k].to_a}
|
3146
3154
|
end
|
3147
3155
|
|
3156
|
+
it 'works with current_datetime_timestamp extension' do
|
3157
|
+
ds = @db.dataset.extension(:current_datetime_timestamp)
|
3158
|
+
tsr = ds.get(Sequel.pg_range(ds.current_datetime..ds.current_datetime, :tstzrange))
|
3159
|
+
if @native
|
3160
|
+
tsr.begin.must_be_kind_of Time
|
3161
|
+
tsr.end.must_be_kind_of Time
|
3162
|
+
end
|
3163
|
+
end
|
3164
|
+
|
3148
3165
|
it 'operations/functions with pg_range_ops' do
|
3149
3166
|
Sequel.extension :pg_range_ops
|
3150
3167
|
|
data/spec/core/dataset_spec.rb
CHANGED
@@ -869,9 +869,16 @@ describe "Dataset#group_by" do
|
|
869
869
|
@dataset.group(:type_id, :b).group_cube.select_sql.must_equal "SELECT * FROM test GROUP BY type_id, b WITH CUBE"
|
870
870
|
end
|
871
871
|
|
872
|
-
it "should
|
872
|
+
it "should support a #grouping_sets method if the database supports it" do
|
873
|
+
meta_def(@dataset, :supports_grouping_sets?){true}
|
874
|
+
@dataset.group(:type_id).grouping_sets.select_sql.must_equal "SELECT * FROM test GROUP BY GROUPING SETS((type_id))"
|
875
|
+
@dataset.group([:type_id, :b], :type_id, []).grouping_sets.select_sql.must_equal "SELECT * FROM test GROUP BY GROUPING SETS((type_id, b), (type_id), ())"
|
876
|
+
end
|
877
|
+
|
878
|
+
it "should have #group_* methods raise an Error if not supported it" do
|
873
879
|
proc{@dataset.group(:type_id).group_rollup}.must_raise(Sequel::Error)
|
874
880
|
proc{@dataset.group(:type_id).group_cube}.must_raise(Sequel::Error)
|
881
|
+
proc{@dataset.group(:type_id).grouping_sets}.must_raise(Sequel::Error)
|
875
882
|
end
|
876
883
|
end
|
877
884
|
|
@@ -2580,6 +2587,17 @@ describe "Dataset #first!" do
|
|
2580
2587
|
it "should raise NoMatchingRow exception if no rows match" do
|
2581
2588
|
proc{Sequel.mock[:t].first!}.must_raise(Sequel::NoMatchingRow)
|
2582
2589
|
end
|
2590
|
+
|
2591
|
+
it "saves a reference to the dataset with the exception to allow further processing" do
|
2592
|
+
dataset = Sequel.mock[:t]
|
2593
|
+
begin
|
2594
|
+
dataset.first!
|
2595
|
+
rescue Sequel::NoMatchingRow => e
|
2596
|
+
e.dataset.must_equal(dataset)
|
2597
|
+
end
|
2598
|
+
proc{raise Sequel::NoMatchingRow, 'test'}.must_raise Sequel::NoMatchingRow
|
2599
|
+
proc{raise Sequel::NoMatchingRow.new('test')}.must_raise Sequel::NoMatchingRow
|
2600
|
+
end
|
2583
2601
|
end
|
2584
2602
|
|
2585
2603
|
describe "Dataset compound operations" do
|
@@ -4877,6 +4895,10 @@ describe "Frozen Datasets" do
|
|
4877
4895
|
@ds.clone.must_be :frozen?
|
4878
4896
|
end
|
4879
4897
|
|
4898
|
+
it "should be equal to unfrozen ones" do
|
4899
|
+
@ds.must_equal @ds.db[:test]
|
4900
|
+
end
|
4901
|
+
|
4880
4902
|
it "should have dups not be frozen" do
|
4881
4903
|
@ds.dup.wont_be :frozen?
|
4882
4904
|
end
|
@@ -694,6 +694,6 @@ describe "Postgres extensions integration" do
|
|
694
694
|
|
695
695
|
it "Range#pg_range should return an PGRange" do
|
696
696
|
@db.literal((1..2).pg_range).must_equal "'[1,2]'"
|
697
|
-
@db.literal((1..2).pg_range(:int4range)).must_equal "
|
697
|
+
@db.literal((1..2).pg_range(:int4range)).must_equal "int4range(1,2,'[]')"
|
698
698
|
end
|
699
699
|
end
|
@@ -58,11 +58,11 @@ describe "class_table_inheritance plugin" do
|
|
58
58
|
Staff.simple_table.must_equal nil
|
59
59
|
end
|
60
60
|
|
61
|
-
|
62
|
-
|
63
|
-
|
64
|
-
|
65
|
-
|
61
|
+
it "should have working row_proc if using set_dataset in subclass to remove columns" do
|
62
|
+
Manager.set_dataset(Manager.dataset.select(*(Manager.columns - [:blah])))
|
63
|
+
Manager.dataset._fetch = {:id=>1, :kind=>'Ceo'}
|
64
|
+
Manager[1].must_equal Ceo.load(:id=>1, :kind=>'Ceo')
|
65
|
+
end
|
66
66
|
|
67
67
|
it "should use a joined dataset in subclasses" do
|
68
68
|
Employee.dataset.sql.must_equal 'SELECT * FROM employees'
|
@@ -276,3 +276,145 @@ describe "class_table_inheritance plugin" do
|
|
276
276
|
@db.sqls.must_equal ['SELECT employees.id, employees.name, employees.kind, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id) WHERE (staff.manager_id = 3)']
|
277
277
|
end
|
278
278
|
end
|
279
|
+
|
280
|
+
describe "class_table_inheritance plugin without sti_key" do
|
281
|
+
before do
|
282
|
+
@db = Sequel.mock(:autoid=>proc{|sql| 1})
|
283
|
+
def @db.supports_schema_parsing?() true end
|
284
|
+
def @db.schema(table, opts={})
|
285
|
+
{:employees=>[[:id, {:primary_key=>true, :type=>:integer}], [:name, {:type=>:string}]],
|
286
|
+
:managers=>[[:id, {:type=>:integer}], [:num_staff, {:type=>:integer}]],
|
287
|
+
:executives=>[[:id, {:type=>:integer}], [:num_managers, {:type=>:integer}]],
|
288
|
+
:staff=>[[:id, {:type=>:integer}], [:manager_id, {:type=>:integer}]],
|
289
|
+
}[table.is_a?(Sequel::Dataset) ? table.first_source_table : table]
|
290
|
+
end
|
291
|
+
@db.extend_datasets do
|
292
|
+
def columns
|
293
|
+
{[:employees]=>[:id, :name],
|
294
|
+
[:managers]=>[:id, :num_staff],
|
295
|
+
[:executives]=>[:id, :num_managers],
|
296
|
+
[:staff]=>[:id, :manager_id],
|
297
|
+
[:employees, :managers]=>[:id, :name, :num_staff],
|
298
|
+
[:employees, :managers, :executives]=>[:id, :name, :num_staff, :num_managers],
|
299
|
+
[:employees, :staff]=>[:id, :name, :manager_id],
|
300
|
+
}[opts[:from] + (opts[:join] || []).map{|x| x.table}]
|
301
|
+
end
|
302
|
+
end
|
303
|
+
class ::Employee < Sequel::Model(@db)
|
304
|
+
def _save_refresh; @values[:id] = 1 end
|
305
|
+
def self.columns
|
306
|
+
dataset.columns
|
307
|
+
end
|
308
|
+
plugin :class_table_inheritance, :table_map=>{:Staff=>:staff}
|
309
|
+
end
|
310
|
+
class ::Manager < Employee
|
311
|
+
one_to_many :staff_members, :class=>:Staff
|
312
|
+
end
|
313
|
+
class ::Executive < Manager
|
314
|
+
end
|
315
|
+
class ::Staff < Employee
|
316
|
+
many_to_one :manager
|
317
|
+
end
|
318
|
+
@ds = Employee.dataset
|
319
|
+
@db.sqls
|
320
|
+
end
|
321
|
+
after do
|
322
|
+
Object.send(:remove_const, :Executive)
|
323
|
+
Object.send(:remove_const, :Manager)
|
324
|
+
Object.send(:remove_const, :Staff)
|
325
|
+
Object.send(:remove_const, :Employee)
|
326
|
+
end
|
327
|
+
|
328
|
+
it "should have simple_table = nil for all subclasses" do
|
329
|
+
Manager.simple_table.must_equal nil
|
330
|
+
Executive.simple_table.must_equal nil
|
331
|
+
Staff.simple_table.must_equal nil
|
332
|
+
end
|
333
|
+
|
334
|
+
it "should have working row_proc if using set_dataset in subclass to remove columns" do
|
335
|
+
Manager.set_dataset(Manager.dataset.select(*(Manager.columns - [:blah])))
|
336
|
+
Manager.dataset._fetch = {:id=>1}
|
337
|
+
Manager[1].must_equal Manager.load(:id=>1)
|
338
|
+
end
|
339
|
+
|
340
|
+
it "should use a joined dataset in subclasses" do
|
341
|
+
Employee.dataset.sql.must_equal 'SELECT * FROM employees'
|
342
|
+
Manager.dataset.sql.must_equal 'SELECT employees.id, employees.name, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id)'
|
343
|
+
Executive.dataset.sql.must_equal 'SELECT employees.id, employees.name, managers.num_staff, executives.num_managers FROM employees INNER JOIN managers ON (managers.id = employees.id) INNER JOIN executives ON (executives.id = managers.id)'
|
344
|
+
Staff.dataset.sql.must_equal 'SELECT employees.id, employees.name, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id)'
|
345
|
+
end
|
346
|
+
|
347
|
+
it "should return rows with the current class if cti_key is nil" do
|
348
|
+
Employee.plugin(:class_table_inheritance)
|
349
|
+
Employee.dataset._fetch = [{}]
|
350
|
+
Employee.first.class.must_equal Employee
|
351
|
+
end
|
352
|
+
|
353
|
+
|
354
|
+
it "should include schema for columns for tables for ancestor classes" do
|
355
|
+
Employee.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string})
|
356
|
+
Manager.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :num_staff=>{:type=>:integer})
|
357
|
+
Executive.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :num_staff=>{:type=>:integer}, :num_managers=>{:type=>:integer})
|
358
|
+
Staff.db_schema.must_equal(:id=>{:primary_key=>true, :type=>:integer}, :name=>{:type=>:string}, :manager_id=>{:type=>:integer})
|
359
|
+
end
|
360
|
+
|
361
|
+
it "should use the correct primary key (which should have the same name in all subclasses)" do
|
362
|
+
[Employee, Manager, Executive, Staff].each{|c| c.primary_key.must_equal :id}
|
363
|
+
end
|
364
|
+
|
365
|
+
it "should have table_name return the table name of the most specific table" do
|
366
|
+
Employee.table_name.must_equal :employees
|
367
|
+
Manager.table_name.must_equal :managers
|
368
|
+
Executive.table_name.must_equal :executives
|
369
|
+
Staff.table_name.must_equal :staff
|
370
|
+
end
|
371
|
+
|
372
|
+
it "should delete the correct rows from all tables when deleting" do
|
373
|
+
Executive.load(:id=>1).delete
|
374
|
+
@db.sqls.must_equal ["DELETE FROM executives WHERE (id = 1)", "DELETE FROM managers WHERE (id = 1)", "DELETE FROM employees WHERE (id = 1)"]
|
375
|
+
end
|
376
|
+
|
377
|
+
it "should not allow deletion of frozen object" do
|
378
|
+
o = Executive.load(:id=>1)
|
379
|
+
o.freeze
|
380
|
+
proc{o.delete}.must_raise(Sequel::Error)
|
381
|
+
@db.sqls.must_equal []
|
382
|
+
end
|
383
|
+
|
384
|
+
it "should insert the correct rows into all tables when inserting" do
|
385
|
+
Executive.create(:num_managers=>3, :num_staff=>2, :name=>'E')
|
386
|
+
sqls = @db.sqls
|
387
|
+
sqls.length.must_equal 3
|
388
|
+
sqls[0].must_match(/INSERT INTO employees \(name\) VALUES \('E'\)/)
|
389
|
+
sqls[1].must_match(/INSERT INTO managers \((num_staff|id), (num_staff|id)\) VALUES \([12], [12]\)/)
|
390
|
+
sqls[2].must_match(/INSERT INTO executives \((num_managers|id), (num_managers|id)\) VALUES \([13], [13]\)/)
|
391
|
+
end
|
392
|
+
|
393
|
+
it "should insert the correct rows into all tables with a given primary key" do
|
394
|
+
e = Executive.new(:num_managers=>3, :num_staff=>2, :name=>'E')
|
395
|
+
e.id = 2
|
396
|
+
e.save
|
397
|
+
sqls = @db.sqls
|
398
|
+
sqls.length.must_equal 3
|
399
|
+
sqls[0].must_match(/INSERT INTO employees \((name|id), (name|id)\) VALUES \(('E'|2), ('E'|2)\)/)
|
400
|
+
sqls[1].must_match(/INSERT INTO managers \((num_staff|id), (num_staff|id)\) VALUES \(2, 2\)/)
|
401
|
+
sqls[2].must_match(/INSERT INTO executives \((num_managers|id), (num_managers|id)\) VALUES \([23], [23]\)/)
|
402
|
+
end
|
403
|
+
|
404
|
+
it "should update the correct rows in all tables when updating" do
|
405
|
+
Executive.load(:id=>2).update(:num_managers=>3, :num_staff=>2, :name=>'E')
|
406
|
+
@db.sqls.must_equal ["UPDATE employees SET name = 'E' WHERE (id = 2)", "UPDATE managers SET num_staff = 2 WHERE (id = 2)", "UPDATE executives SET num_managers = 3 WHERE (id = 2)"]
|
407
|
+
end
|
408
|
+
|
409
|
+
it "should handle many_to_one relationships correctly" do
|
410
|
+
Manager.dataset._fetch = {:id=>3, :name=>'E', :num_staff=>3}
|
411
|
+
Staff.load(:manager_id=>3).manager.must_equal Manager.load(:id=>3, :name=>'E', :num_staff=>3)
|
412
|
+
@db.sqls.must_equal ['SELECT employees.id, employees.name, managers.num_staff FROM employees INNER JOIN managers ON (managers.id = employees.id) WHERE (managers.id = 3) LIMIT 1']
|
413
|
+
end
|
414
|
+
|
415
|
+
it "should handle one_to_many relationships correctly" do
|
416
|
+
Staff.dataset._fetch = {:id=>1, :name=>'S', :manager_id=>3}
|
417
|
+
Executive.load(:id=>3).staff_members.must_equal [Staff.load(:id=>1, :name=>'S', :manager_id=>3)]
|
418
|
+
@db.sqls.must_equal ['SELECT employees.id, employees.name, staff.manager_id FROM employees INNER JOIN staff ON (staff.id = employees.id) WHERE (staff.manager_id = 3)']
|
419
|
+
end
|
420
|
+
end
|
@@ -511,7 +511,7 @@ describe "Postgres extensions integration" do
|
|
511
511
|
|
512
512
|
it "Range#pg_range should return an PGRange" do
|
513
513
|
@db.literal((1..2).pg_range).must_equal "'[1,2]'"
|
514
|
-
@db.literal((1..2).pg_range(:int4range)).must_equal "
|
514
|
+
@db.literal((1..2).pg_range(:int4range)).must_equal "int4range(1,2,'[]')"
|
515
515
|
end
|
516
516
|
end
|
517
517
|
else
|
@@ -26,7 +26,7 @@ describe "Sequel::Postgres::RangeOp" do
|
|
26
26
|
end
|
27
27
|
|
28
28
|
it "PGRange#op should return a RangeOp" do
|
29
|
-
@ds.literal(Sequel.pg_range(1..2, :numrange).op.lower).must_equal "lower(
|
29
|
+
@ds.literal(Sequel.pg_range(1..2, :numrange).op.lower).must_equal "lower(numrange(1,2,'[]'))"
|
30
30
|
end
|
31
31
|
|
32
32
|
it "should define methods for all of the PostgreSQL range operators" do
|
@@ -36,7 +36,7 @@ describe "pg_range extension" do
|
|
36
36
|
@db.literal(@R.new(1, 2, :exclude_end=>true)).must_equal "'[1,2)'"
|
37
37
|
@db.literal(@R.new(nil, 2)).must_equal "'[,2]'"
|
38
38
|
@db.literal(@R.new(1, nil)).must_equal "'[1,]'"
|
39
|
-
@db.literal(@R.new(1, 2, :db_type=>'int8range')).must_equal "
|
39
|
+
@db.literal(@R.new(1, 2, :db_type=>'int8range')).must_equal "int8range(1,2,'[]')"
|
40
40
|
@db.literal(@R.new(nil, nil, :empty=>true)).must_equal "'empty'"
|
41
41
|
@db.literal(@R.new("", 2)).must_equal "'[\"\",2]'"
|
42
42
|
end
|
@@ -239,6 +239,10 @@ describe "Simple Dataset operations" do
|
|
239
239
|
|
240
240
|
it "should fetch correctly with a limit and offset without an order" do
|
241
241
|
@ds.limit(2, 1).all.must_equal []
|
242
|
+
@ds.join(:items___i, :id=>:id).select(:items__id___s, :i__id___id2).limit(2, 1).all.must_equal []
|
243
|
+
@ds.join(:items___i, :id=>:id).select(:items__id).limit(2, 1).all.must_equal []
|
244
|
+
@ds.join(:items___i, :id=>:id).select(Sequel.qualify(:items, :id)).limit(2, 1).all.must_equal []
|
245
|
+
@ds.join(:items___i, :id=>:id).select(Sequel.qualify(:items, :id).as(:s)).limit(2, 1).all.must_equal []
|
242
246
|
end
|
243
247
|
|
244
248
|
it "should be orderable by column number" do
|
@@ -957,14 +961,19 @@ describe "Sequel::Dataset convenience methods" do
|
|
957
961
|
end
|
958
962
|
|
959
963
|
it "#group_rollup should include hierarchy of groupings" do
|
960
|
-
@ds.group_by(:a).group_rollup.select_map([:a, Sequel.function(:sum, :b).cast(Integer).as(:b), Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.
|
961
|
-
@ds.group_by(:a, :b).group_rollup.select_map([:a, :b, Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.
|
964
|
+
@ds.group_by(:a).group_rollup.select_map([:a, Sequel.function(:sum, :b).cast(Integer).as(:b), Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, 17, 27], [1, 10, 16], [2, 7, 11]]
|
965
|
+
@ds.group_by(:a, :b).group_rollup.select_map([:a, :b, Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, nil, 27], [1, nil, 16], [1, 3, 11], [1, 4, 5], [2, nil, 11], [2, 3, 5], [2, 4, 6]]
|
962
966
|
end if DB.dataset.supports_group_rollup?
|
963
967
|
|
964
968
|
it "#group_cube should include all combinations of groupings" do
|
965
|
-
@ds.group_by(:a).group_cube.select_map([:a, Sequel.function(:sum, :b).cast(Integer).as(:b), Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.
|
966
|
-
@ds.group_by(:a, :b).group_cube.select_map([:a, :b, Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.
|
969
|
+
@ds.group_by(:a).group_cube.select_map([:a, Sequel.function(:sum, :b).cast(Integer).as(:b), Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, 17, 27], [1, 10, 16], [2, 7, 11]]
|
970
|
+
@ds.group_by(:a, :b).group_cube.select_map([:a, :b, Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, nil, 27], [nil, 3, 16], [nil, 4, 11], [1, nil, 16], [1, 3, 11], [1, 4, 5], [2, nil, 11], [2, 3, 5], [2, 4, 6]]
|
967
971
|
end if DB.dataset.supports_group_cube?
|
972
|
+
|
973
|
+
it "#grouping_sets should include sets specified in group" do
|
974
|
+
@ds.group_by(:a, []).grouping_sets.select_map([:a, Sequel.function(:sum, :b).cast(Integer).as(:b), Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, 17, 27], [1, 10, 16], [2, 7, 11]]
|
975
|
+
@ds.group_by([:a, :b], :a, :b, []).grouping_sets.select_map([:a, :b, Sequel.function(:sum, :c).cast(Integer).as(:c)]).sort_by{|x| x.map(&:to_i)}.must_equal [[nil, nil, 27], [nil, 3, 16], [nil, 4, 11], [1, nil, 16], [1, 3, 11], [1, 4, 5], [2, nil, 11], [2, 3, 5], [2, 4, 6]]
|
976
|
+
end if DB.dataset.supports_grouping_sets?
|
968
977
|
end
|
969
978
|
|
970
979
|
describe "Sequel::Dataset convenience methods" do
|
@@ -1971,6 +1971,7 @@ describe "Sequel::Plugins::ConstraintValidations" do
|
|
1971
1971
|
before(:all) do
|
1972
1972
|
@db = DB
|
1973
1973
|
@db.extension(:constraint_validations)
|
1974
|
+
@db.drop_table?(:sequel_constraint_validations)
|
1974
1975
|
@db.create_constraint_validations_table
|
1975
1976
|
@ds = @db[:cv_test]
|
1976
1977
|
@regexp = regexp = @db.dataset.supports_regexp?
|
data/spec/model/base_spec.rb
CHANGED
@@ -91,6 +91,13 @@ describe Sequel::Model, "dataset" do
|
|
91
91
|
it "should raise if no dataset is explicitly set and the class is anonymous" do
|
92
92
|
proc {@b.dataset}.must_raise(Sequel::Error)
|
93
93
|
end
|
94
|
+
|
95
|
+
it "should not override dataset explicitly set when subclassing" do
|
96
|
+
sc = Class.new(::Elephant) do
|
97
|
+
set_dataset :foo
|
98
|
+
end
|
99
|
+
sc.table_name.must_equal :foo
|
100
|
+
end
|
94
101
|
end
|
95
102
|
|
96
103
|
describe Sequel::Model, "implicit table names" do
|
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sequel
 version: !ruby/object:Gem::Version
-  version: 4.
+  version: 4.26.0
 platform: ruby
 authors:
 - Jeremy Evans
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-
+date: 2015-09-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: minitest
@@ -226,6 +226,7 @@ extra_rdoc_files:
 - doc/release_notes/4.23.0.txt
 - doc/release_notes/4.24.0.txt
 - doc/release_notes/4.25.0.txt
+- doc/release_notes/4.26.0.txt
 files:
 - CHANGELOG
 - MIT-LICENSE
@@ -339,6 +340,7 @@ files:
 - doc/release_notes/4.23.0.txt
 - doc/release_notes/4.24.0.txt
 - doc/release_notes/4.25.0.txt
+- doc/release_notes/4.26.0.txt
 - doc/release_notes/4.3.0.txt
 - doc/release_notes/4.4.0.txt
 - doc/release_notes/4.5.0.txt
@@ -847,7 +849,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.4.5
+rubygems_version: 2.4.5.1
 signing_key:
 specification_version: 4
 summary: The Database Toolkit for Ruby