sequel 4.8.0 → 4.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG +48 -0
- data/doc/association_basics.rdoc +1 -1
- data/doc/opening_databases.rdoc +4 -0
- data/doc/postgresql.rdoc +27 -3
- data/doc/release_notes/4.9.0.txt +190 -0
- data/doc/security.rdoc +1 -1
- data/doc/testing.rdoc +2 -2
- data/doc/validations.rdoc +8 -0
- data/lib/sequel/adapters/jdbc.rb +5 -3
- data/lib/sequel/adapters/jdbc/derby.rb +2 -8
- data/lib/sequel/adapters/jdbc/h2.rb +2 -13
- data/lib/sequel/adapters/jdbc/hsqldb.rb +2 -16
- data/lib/sequel/adapters/mysql2.rb +11 -1
- data/lib/sequel/adapters/postgres.rb +33 -10
- data/lib/sequel/adapters/shared/db2.rb +2 -10
- data/lib/sequel/adapters/shared/mssql.rb +10 -8
- data/lib/sequel/adapters/shared/oracle.rb +9 -24
- data/lib/sequel/adapters/shared/postgres.rb +32 -9
- data/lib/sequel/adapters/shared/sqlanywhere.rb +2 -4
- data/lib/sequel/adapters/shared/sqlite.rb +4 -7
- data/lib/sequel/database/schema_methods.rb +15 -0
- data/lib/sequel/dataset.rb +1 -1
- data/lib/sequel/dataset/actions.rb +159 -27
- data/lib/sequel/dataset/graph.rb +29 -7
- data/lib/sequel/dataset/misc.rb +6 -0
- data/lib/sequel/dataset/placeholder_literalizer.rb +164 -0
- data/lib/sequel/dataset/query.rb +2 -0
- data/lib/sequel/dataset/sql.rb +103 -91
- data/lib/sequel/extensions/current_datetime_timestamp.rb +57 -0
- data/lib/sequel/extensions/pg_array.rb +68 -106
- data/lib/sequel/extensions/pg_hstore.rb +5 -5
- data/lib/sequel/extensions/schema_dumper.rb +49 -49
- data/lib/sequel/model.rb +4 -2
- data/lib/sequel/model/associations.rb +1 -1
- data/lib/sequel/model/base.rb +136 -3
- data/lib/sequel/model/errors.rb +6 -0
- data/lib/sequel/plugins/defaults_setter.rb +1 -1
- data/lib/sequel/plugins/eager_each.rb +9 -0
- data/lib/sequel/plugins/nested_attributes.rb +2 -2
- data/lib/sequel/plugins/timestamps.rb +2 -2
- data/lib/sequel/plugins/touch.rb +2 -2
- data/lib/sequel/sql.rb +20 -15
- data/lib/sequel/version.rb +1 -1
- data/spec/adapters/postgres_spec.rb +70 -8
- data/spec/core/dataset_spec.rb +172 -27
- data/spec/core/expression_filters_spec.rb +3 -3
- data/spec/core/object_graph_spec.rb +17 -1
- data/spec/core/placeholder_literalizer_spec.rb +128 -0
- data/spec/core/schema_spec.rb +54 -0
- data/spec/extensions/current_datetime_timestamp_spec.rb +27 -0
- data/spec/extensions/defaults_setter_spec.rb +12 -0
- data/spec/extensions/eager_each_spec.rb +6 -0
- data/spec/extensions/nested_attributes_spec.rb +14 -2
- data/spec/extensions/pg_array_spec.rb +15 -7
- data/spec/extensions/shared_caching_spec.rb +5 -5
- data/spec/extensions/timestamps_spec.rb +9 -0
- data/spec/extensions/touch_spec.rb +9 -0
- data/spec/integration/database_test.rb +1 -1
- data/spec/integration/dataset_test.rb +27 -5
- data/spec/model/eager_loading_spec.rb +32 -0
- data/spec/model/model_spec.rb +119 -9
- metadata +8 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 54a1904977f9d5f4de5e523dbd0ba244e8ef6203
|
4
|
+
data.tar.gz: 404f40b96e9fc774587485c727186aa6ab2c54b7
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 323f3bb4faea4a371565fecf027aa190a8b4b8232b2da1a0205fcdbccaac20decddbed540a992a4a7c19df78904483b4da3e70be408f3c432c4288357c5b37d3
|
7
|
+
data.tar.gz: cef071ae2cac67201d942e674905dfb266cf9054bf80f3d1aed43ce9e2e032a71325101406ce689592c757335a8f2471ce14f8102f4e178b37e6dd9777c12b40
|
data/CHANGELOG
CHANGED
@@ -1,3 +1,51 @@
|
|
1
|
+
=== 4.9.0 (2014-04-01)
|
2
|
+
|
3
|
+
* Recognize CHECK constraint violations on newer versions of SQLite (jeremyevans)
|
4
|
+
|
5
|
+
* Do not attempt to eager load when calling Dataset#columns in the eager_each plugin (jeremyevans)
|
6
|
+
|
7
|
+
* Support :driver option for jdbc adapter, for specifying driver class for cases where getConnection doesn't work (jeremyevans) (#785)
|
8
|
+
|
9
|
+
* Massive speedup for PostgreSQL array parser (jeremyevans) (#788)
|
10
|
+
|
11
|
+
* Add current_datetime_timestamp extension, for current Time/DateTime instances that are literalized as CURRENT_TIMESTAMP (jeremyevans)
|
12
|
+
|
13
|
+
* Recognize additional unique constraint violations on SQLite (jeremyevans) (#782)
|
14
|
+
|
15
|
+
* Don't remove column value when validating nested attributes for one_to_* association where association foreign key is the model's primary key (jeremyevans)
|
16
|
+
|
17
|
+
* Add Dataset#disable_insert_returning on PostgreSQL for skipping implicit use of RETURNING (jeremyevans)
|
18
|
+
|
19
|
+
* Automatically optimize Model.[], .with_pk, and .with_pk! for models with composite keys (jeremyevans)
|
20
|
+
|
21
|
+
* Automatically optimize Model.[] when called with a hash (jeremyevans)
|
22
|
+
|
23
|
+
* Automatically optimize Model.find, .first, and .first! when called with a single argument (jeremyevans)
|
24
|
+
|
25
|
+
* Add Model.finder for creating optimized finder methods using Dataset::PlaceholderLiteralizer (jeremyevans)
|
26
|
+
|
27
|
+
* Add Dataset::PlaceholderLiteralizer optimization framework (jeremyevans)
|
28
|
+
|
29
|
+
* Add Dataset#with_sql_{each,all,first,single_value,insert,update} optimized methods (jeremyevans)
|
30
|
+
|
31
|
+
* Make pg_array extension use correct type when typecasting column values for smallint, oid, real, character, and varchar arrays (jeremyevans)
|
32
|
+
|
33
|
+
* Make Database#column_schema_to_ruby_default a public method in the schema_dumper extension (jeremyevans) (#776)
|
34
|
+
|
35
|
+
* Fix multiple corner cases in the eager_graph support (jeremyevans) (#771)
|
36
|
+
|
37
|
+
* Use streaming to implement paging for Dataset#paged_each in the mysql2 adapter (jeremyevans)
|
38
|
+
|
39
|
+
* Use a cursor to implement paging for Dataset#paged_each in the postgres adapter (jeremyevans)
|
40
|
+
|
41
|
+
* Add Database#create_join_table? and #create_join_table! for consistency (jeremyevans)
|
42
|
+
|
43
|
+
* Add Dataset#where_current_of to the postgres adapter for supporting updating rows based on a cursor's current position (jeremyevans)
|
44
|
+
|
45
|
+
* Add Dataset#use_cursor :hold option in the postgres adapter for supporting cursor use outside of a transaction (jeremyevans)
|
46
|
+
|
47
|
+
* Add Dataset#paged_each :strategy=>:filter option for increased performance (jeremyevans)
|
48
|
+
|
1
49
|
=== 4.8.0 (2014-03-01)
|
2
50
|
|
3
51
|
* Add SQL::AliasedExpression#alias alias for #aliaz (jeremyevans)
|
data/doc/association_basics.rdoc
CHANGED
data/doc/opening_databases.rdoc
CHANGED
@@ -283,6 +283,10 @@ The following additional options are supported:
|
|
283
283
|
Setting to false roughly doubles performance when selecting large numbers of rows.
|
284
284
|
Note that you can't provide this option inside the connection string (as that is passed
|
285
285
|
directly to JDBC), you have to pass it as a separate option.
|
286
|
+
:driver :: Specify the Java driver class to use to connect to the database. This only has
|
287
|
+
an effect if the database type is not recognized from the connection string,
|
288
|
+
and only helps cases where <tt>java.sql.DriverManager.getConnection</tt> does not
|
289
|
+
return a connection.
|
286
290
|
:login_timeout :: Set the login timeout on the JDBC connection (in seconds).
|
287
291
|
|
288
292
|
=== mysql
|
data/doc/postgresql.rdoc
CHANGED
@@ -44,14 +44,15 @@ pg_range :: ranges (for any scalar type), as a ruby Range-like object
|
|
44
44
|
pg_row :: row-valued/composite types, as a ruby Hash-like or Sequel::Model object
|
45
45
|
|
46
46
|
In general, these extensions just add support for Database objects to return retrieved
|
47
|
-
column values as the appropriate type (<tt>postgres only</tt>), and support for literalizing
|
48
|
-
the objects correctly for use in an SQL string, or using them as bound variable values (<tt>postgres/pg only</tt>).
|
47
|
+
column values as the appropriate type (<tt>postgres and jdbc/postgres only</tt>), and support for literalizing
|
48
|
+
the objects correctly for use in an SQL string, or using them as bound variable values (<tt>postgres/pg and jdbc/postgres only</tt>).
|
49
49
|
|
50
50
|
There are also type-specific extensions that make it easy to use database functions
|
51
51
|
and operators related to the type. These extensions are:
|
52
52
|
|
53
53
|
pg_array_ops :: array-related functions and operators
|
54
54
|
pg_hstore_ops :: hstore-related functions and operators
|
55
|
+
pg_json_ops :: json-related functions and operators
|
55
56
|
pg_range_ops :: range-related functions and operators
|
56
57
|
pg_row_ops :: row-valued/composite type syntax support
|
57
58
|
|
@@ -75,7 +76,7 @@ You can also add exclusion constraints in +alter_table+ blocks using add_exclusi
|
|
75
76
|
end
|
76
77
|
# ALTER TABLE "table" ADD CONSTRAINT "table_during_excl" EXCLUDE USING gist ("during" WITH &&)
|
77
78
|
|
78
|
-
=== Adding Foreign Key Constraints Without Initial Validation
|
79
|
+
=== Adding Foreign Key and Check Constraints Without Initial Validation
|
79
80
|
|
80
81
|
You can add a <tt>:not_valid=>true</tt> option when adding constraints to existing tables so
|
81
82
|
that it doesn't check if all current rows are valid:
|
@@ -83,16 +84,21 @@ that it doesn't check if all current rows are valid:
|
|
83
84
|
DB.alter_table(:table) do
|
84
85
|
# Assumes t_id column already exists
|
85
86
|
add_foreign_key([:t_id], :table, :not_valid=>true, :name=>:table_fk)
|
87
|
+
|
88
|
+
constraint({:name=>:col_123, :not_valid=>true}, :col=>[1,2,3])
|
86
89
|
end
|
87
90
|
# ALTER TABLE "table" ADD CONSTRAINT "table_fk" FOREIGN KEY ("t_id") REFERENCES "table" NOT VALID
|
91
|
+
# ALTER TABLE "table" ADD CONSTRAINT "col_123" CHECK (col IN (1, 2, 3)) NOT VALID
|
88
92
|
|
89
93
|
Such constraints will be enforced for newly inserted and updated rows, but not for existing rows. After
|
90
94
|
all existing rows have been fixed, you can validate the constraint:
|
91
95
|
|
92
96
|
DB.alter_table(:table) do
|
93
97
|
validate_constraint(:table_fk)
|
98
|
+
validate_constraint(:col_123)
|
94
99
|
end
|
95
100
|
# ALTER TABLE "table" VALIDATE CONSTRAINT "table_fk"
|
101
|
+
# ALTER TABLE "table" VALIDATE CONSTRAINT "col_123"
|
96
102
|
|
97
103
|
=== Creating Indexes Concurrently
|
98
104
|
|
@@ -202,6 +208,17 @@ without keeping all rows in memory:
|
|
202
208
|
# CLOSE sequel_cursor
|
203
209
|
# COMMIT
|
204
210
|
|
211
|
+
This support is used by default when using <tt>Dataset#paged_each</tt>.
|
212
|
+
|
213
|
+
Using cursors, it is possible to update individual rows of a large dataset
|
214
|
+
easily using the <tt>:rows_per_fetch=>1</tt> option in conjunction with
|
215
|
+
<tt>Dataset#where_current_of</tt>. This is useful if the logic needed to
|
216
|
+
update the rows exists in the application and not in the database:
|
217
|
+
|
218
|
+
ds.use_cursor(:rows_per_fetch=>1).each do |row|
|
219
|
+
ds.where_current_of.update(:col=>new_col_value(row))
|
220
|
+
end
|
221
|
+
|
205
222
|
=== Truncate Modifiers
|
206
223
|
|
207
224
|
Sequel supports PostgreSQL-specific truncate options:
|
@@ -261,6 +278,10 @@ notifications:
|
|
261
278
|
|
262
279
|
DB.listen(:channel, :loop=>true){|channel| p channel}
|
263
280
|
|
281
|
+
The +pg_static_cache_updater+ extension uses this support to automatically update
|
282
|
+
the caches for models using the +static_cache+ plugin. Look at the documentation of that
|
283
|
+
plugin for details.
|
284
|
+
|
264
285
|
=== Locking Tables
|
265
286
|
|
266
287
|
Sequel makes it easy to lock tables, though it is generally better to let the database
|
@@ -300,3 +321,6 @@ Then you can stream individual datasets:
|
|
300
321
|
Or stream all datasets by default:
|
301
322
|
|
302
323
|
DB.stream_all_queries = true
|
324
|
+
|
325
|
+
When streaming is enabled, <tt>Dataset#paged_each</tt> will use streaming to implement
|
326
|
+
paging.
|
@@ -0,0 +1,190 @@
|
|
1
|
+
= Performance Enhancements
|
2
|
+
|
3
|
+
* Dataset::PlaceholderLiteralizer has been added as an optimization
|
4
|
+
framework. This allows you to record changes to a given dataset
|
5
|
+
using placeholder arguments, and later quickly execute the query
|
6
|
+
providing values for the placeholders. This is similar in idea
|
7
|
+
to prepared statements, except that the SQL for each query can
|
8
|
+
change depending on the values for the placeholders.
|
9
|
+
|
10
|
+
Using this optimization framework, generating the SQL for a query
|
11
|
+
is about 3x faster, and since SQL generation time is a significant
|
12
|
+
portion of total time for simple queries, simple queries can
|
13
|
+
execute up to 50% faster.
|
14
|
+
|
15
|
+
There are two APIs for this optimization framework. There is a
|
16
|
+
lower level dataset API:
|
17
|
+
|
18
|
+
loader = Sequel::Dataset::PlaceholderLiteralizer.
|
19
|
+
loader(DB[:items]) do |pl, ds|
|
20
|
+
ds.where(:id=>pl.arg).exclude(:name=>pl.arg).limit(1)
|
21
|
+
end
|
22
|
+
|
23
|
+
loader.first(1, "foo")
|
24
|
+
# SELECT * FROM items WHERE ((id = 1) AND (name != 'foo')) LIMIT 1
|
25
|
+
|
26
|
+
loader.first([1, 2], %w"foo bar")
|
27
|
+
# SELECT * FROM items WHERE ((id IN (1, 2)) AND
|
28
|
+
# (name NOT IN ('foo', 'bar'))) LIMIT 1
|
29
|
+
|
30
|
+
There is also a higher level model API (Model.finder):
|
31
|
+
|
32
|
+
class Item < Sequel::Model
|
33
|
+
# Given class method that returns a dataset
|
34
|
+
def self.by_id_and_not_name(id, not_name)
|
35
|
+
where(:id=>id).exclude(:name=>not_name)
|
36
|
+
end
|
37
|
+
|
38
|
+
# Create optimized method that returns first value
|
39
|
+
finder :by_id_and_not_name
|
40
|
+
end
|
41
|
+
|
42
|
+
# Call optimized method
|
43
|
+
Album.first_by_id_and_not_name(1, 'foo')
|
44
|
+
# SELECT * FROM items WHERE ((id = 1) AND (name != 'foo')) LIMIT 1
|
45
|
+
|
46
|
+
Model.finder defaults to creating a method that returns the first
|
47
|
+
matching row, but using the :type option you can create methods
|
48
|
+
that call each, all, or get. There is also an option to choose the
|
49
|
+
method name (:name), as well as one to specify the number of
|
50
|
+
arguments to use if the method doesn't take a fixed number
|
51
|
+
(:arity).
|
52
|
+
|
53
|
+
Finally, Model.find, .first, and .first! now automatically use an
|
54
|
+
optimized finder if given a single argument. Model.[] uses an
|
55
|
+
optimized finder if given a single hash, and Model.[], .with_pk,
|
56
|
+
and .with_pk! use an optimized finder if the model has a composite
|
57
|
+
primary key. In all of these cases, these methods are about 50%
|
58
|
+
faster than before.
|
59
|
+
|
60
|
+
* The pure-ruby PostgreSQL array parser that ships with Sequel has
|
61
|
+
been replaced with a strscan-based parser. This parser avoids
|
62
|
+
O(n^2) performance for arrays with multibyte strings, and in general
|
63
|
+
is much faster. Parsing an array with a single string with 100,000
|
64
|
+
multibyte characters is about 1000x faster, and now about half the
|
65
|
+
speed of the C implementation in sequel_pg.
|
66
|
+
|
67
|
+
* Dataset#paged_each now has a :strategy=>:filter option that
|
68
|
+
dramatically improves performance, especially if the columns
|
69
|
+
being ordered by are indexed.
|
70
|
+
|
71
|
+
Unfortunately, there are enough corner cases to this approach
|
72
|
+
that it cannot be used by default. At the least, the dataset
|
73
|
+
needs to be selecting the columns it is ordering by, not aliasing
|
74
|
+
the columns it is ordering by in the SELECT clause, not have
|
75
|
+
NULLs in any of the columns being ordered by, and not itself use
|
76
|
+
a limit or offset.
|
77
|
+
|
78
|
+
If you are ordering by expressions that are not simple column
|
79
|
+
values, you can provide a :filter_value option proc that takes the
|
80
|
+
last retrieved row and array of order by expressions, and returns
|
81
|
+
an array of values in the last retrieved row for those order by
|
82
|
+
expressions.
|
83
|
+
|
84
|
+
* In the postgres adapter, Dataset#paged_each now automatically uses
|
85
|
+
a cursor for improved performance.
|
86
|
+
|
87
|
+
* In the mysql2 adapter, Dataset#paged_each now automatically uses
|
88
|
+
streaming for improved performance, if streaming is supported.
|
89
|
+
|
90
|
+
* Dataset#with_sql_{each,all,first,single_value,insert,update}
|
91
|
+
have been added. These methods take specific SQL and execute
|
92
|
+
it on the database, returning the appropriate value. They
|
93
|
+
are significantly faster than the previous approach of
|
94
|
+
with_sql(SQL).{each,all,first,single_value,insert,update},
|
95
|
+
as they don't require cloning the dataset.
|
96
|
+
|
97
|
+
= New Features
|
98
|
+
|
99
|
+
* Database#create_join_table! and #create_join_table? have been added,
|
100
|
+
for consistency with #create_table! and #create_table?.
|
101
|
+
|
102
|
+
* A :hold option has been added to Dataset#use_cursor in the postgres
|
103
|
+
adapter, which uses WITH HOLD in the query, allowing for usage of
|
104
|
+
the cursor outside the enclosing transaction. When :hold is used,
|
105
|
+
Sequel does not automatically use a transaction around the cursor
|
106
|
+
call.
|
107
|
+
|
108
|
+
* Dataset#where_current_of has been added to the postgres adapter,
|
109
|
+
for updating rows based on a cursor's current position. This can
|
110
|
+
be used to update a large dataset where new values depend on
|
111
|
+
some ruby method, without keeping all rows in memory.
|
112
|
+
|
113
|
+
ds = DB[:huge_table]
|
114
|
+
ds.use_cursor(:rows_per_fetch=>1).each do |row|
|
115
|
+
ds.where_current_of.update(:column=>ruby_method(row))
|
116
|
+
end
|
117
|
+
|
118
|
+
* A current_datetime_timestamp extension has been added, for
|
119
|
+
creating Time/DateTime instances that are literalized as
|
120
|
+
CURRENT_TIMESTAMP. When the dataset uses this extension, models
|
121
|
+
that use the touch and timestamps plugins will use
|
122
|
+
CURRENT_TIMESTAMP for the timestamps.
|
123
|
+
|
124
|
+
* The jdbc adapter now supports a :driver option, useful when
|
125
|
+
Sequel doesn't have direct support for the underlying driver, and
|
126
|
+
where java.sql.DriverManager.getConnection does not work
|
127
|
+
correctly due to Java class loading issues.
|
128
|
+
|
129
|
+
= Other Improvements
|
130
|
+
|
131
|
+
* Multiple corner cases in Dataset#eager_graph have been fixed.
|
132
|
+
|
133
|
+
* Calling Dataset#columns when using the eager_each plugin no
|
134
|
+
longer triggers eager loading.
|
135
|
+
|
136
|
+
* Database#column_schema_to_ruby_default is now a public method
|
137
|
+
in the schema_dumper extension.
|
138
|
+
|
139
|
+
* When validating associated objects for one_to_many and one_to_one
|
140
|
+
associations in the nested_attributes plugin, don't remove column
|
141
|
+
values if the association's foreign key is the associated model's
|
142
|
+
primary key.
|
143
|
+
|
144
|
+
* On PostgreSQL, Dataset#disable_insert_returning has been added
|
145
|
+
back. This disables the automatic use of RETURNING for INSERTs
|
146
|
+
for the dataset. This is necessary in cases where INSERT
|
147
|
+
RETURNING doesn't work, such as PostgreSQL <8.2 (or PostgreSQL
|
148
|
+
variants that forked before 8.2), or when using partitioning
|
149
|
+
with trigger functions, or conditional rules.
|
150
|
+
|
151
|
+
Note that if you use disable_insert_returning, insert will not
|
152
|
+
return the autoincremented primary key. You need to call
|
153
|
+
currval or lastval manually using the same connection to get
|
154
|
+
the value, or use nextval to get the value to use before
|
155
|
+
inserting.
|
156
|
+
|
157
|
+
* The pg_array extension now uses the correct database type when
|
158
|
+
typecasting values for smallint, oid, real, character, and varchar
|
159
|
+
arrays. Previously, Sequel did not use the correct database type
|
160
|
+
in some cases (e.g. text[] for a varchar[]), which resulted in
|
161
|
+
errors if the value was used in a filter expression.
|
162
|
+
|
163
|
+
* Additional unique constraint violations are now recognized on
|
164
|
+
SQLite.
|
165
|
+
|
166
|
+
* Check constraint violations are now recognized on SQLite >=3.8.2.
|
167
|
+
|
168
|
+
* Adapters that emulate bitwise operators now do so using an append
|
169
|
+
only design, similar to how all other queries are built in Sequel.
|
170
|
+
|
171
|
+
= Backwards Compatibility
|
172
|
+
|
173
|
+
* In some cases Sequel no longer adds superfluous parentheses when
|
174
|
+
constructing SQL strings. If you are testing for specific SQL,
|
175
|
+
this can cause test failures.
|
176
|
+
|
177
|
+
* The pg_array extension no longer recognizes the :typecast_method
|
178
|
+
option when registering an array type. The option allowed reuse
|
179
|
+
of an existing typecast method, but as that results in an incorrect
|
180
|
+
type at the database level, the option was fundamentally broken.
|
181
|
+
|
182
|
+
* The internals of the PostgreSQL array parser have changed. If you
|
183
|
+
were relying on them, you'll need to update your code.
|
184
|
+
|
185
|
+
* Dataset#complex_expression_arg_pairs private method now returns
|
186
|
+
nested expression objects instead of an already literalized string
|
187
|
+
in some cases. Custom adapters that call this method will probably
|
188
|
+
need to be changed. It's recommended that such adapters switch to
|
189
|
+
using the new Dataset#complex_expression_emulate_append method if
|
190
|
+
possible.
|
data/doc/security.rdoc
CHANGED
@@ -126,7 +126,7 @@ Note that for that type of query, Sequel generally encourages the following form
|
|
126
126
|
DB[:table].where{|o| o.name > params[:id].to_s} # Safe
|
127
127
|
|
128
128
|
Sequel's DSL supports a wide variety of SQL concepts, so it's possible to
|
129
|
-
code most applications without
|
129
|
+
code most applications without ever using raw SQL.
|
130
130
|
|
131
131
|
A large number of dataset methods ultimately pass down their arguments to a filter
|
132
132
|
method, even some you may not expect, so you should be careful. At least the
|
data/doc/testing.rdoc
CHANGED
@@ -29,7 +29,7 @@ These run each test in its own transaction, the recommended way to test.
|
|
29
29
|
end
|
30
30
|
end
|
31
31
|
|
32
|
-
=== RSpec
|
32
|
+
=== RSpec >=2.8
|
33
33
|
|
34
34
|
# Global around filters should work
|
35
35
|
RSpec.configure do |c|
|
@@ -126,7 +126,7 @@ The order in which you delete/truncate the tables is important if you are using
|
|
126
126
|
|
127
127
|
= Testing Sequel Itself
|
128
128
|
|
129
|
-
Sequel has multiple separate test suites. All test suites run under
|
129
|
+
Sequel has multiple separate test suites. All test suites run under rspec >=1.3.
|
130
130
|
|
131
131
|
== rake spec
|
132
132
|
|
data/doc/validations.rdoc
CHANGED
@@ -510,6 +510,14 @@ Here, you don't care about validating the release date if there were validation
|
|
510
510
|
album.errors.full_messages
|
511
511
|
# => ["name cannot be empty"]
|
512
512
|
|
513
|
+
Note that the column names used in the errors are used verbatim in the error messages. If you want full control over the error messages, you can use +add+ with a literal string:
|
514
|
+
|
515
|
+
errors.add(:name, Sequel.lit("Album name is not valid"))
|
516
|
+
errors.full_messages
|
517
|
+
# => ["Album name is not valid"]
|
518
|
+
|
519
|
+
Alternatively, feel free to override Sequel::Model::Errors#full_messages. As long as it returns an array of strings, overriding it is completely safe.
|
520
|
+
|
513
521
|
=== +count+
|
514
522
|
|
515
523
|
+count+ returns the total number of error messages in the errors.
|
data/lib/sequel/adapters/jdbc.rb
CHANGED
@@ -188,7 +188,7 @@ module Sequel
|
|
188
188
|
# The type of database we are connecting to
|
189
189
|
attr_reader :database_type
|
190
190
|
|
191
|
-
# The Java database driver we are using
|
191
|
+
# The Java database driver we are using (should be a Java class)
|
192
192
|
attr_reader :driver
|
193
193
|
|
194
194
|
# Whether to convert some Java types to ruby types when retrieving rows.
|
@@ -379,8 +379,10 @@ module Sequel
|
|
379
379
|
|
380
380
|
resolved_uri = jndi? ? get_uri_from_jndi : uri
|
381
381
|
|
382
|
-
if match = /\Ajdbc:([^:]+)/.match(resolved_uri)
|
383
|
-
|
382
|
+
@driver = if (match = /\Ajdbc:([^:]+)/.match(resolved_uri)) && (prok = DATABASE_SETUP[match[1].to_sym])
|
383
|
+
prok.call(self)
|
384
|
+
else
|
385
|
+
@opts[:driver]
|
384
386
|
end
|
385
387
|
end
|
386
388
|
|
@@ -173,8 +173,6 @@ module Sequel
|
|
173
173
|
PAREN_OPEN = Dataset::PAREN_OPEN
|
174
174
|
OFFSET = Dataset::OFFSET
|
175
175
|
CAST_STRING_OPEN = "RTRIM(".freeze
|
176
|
-
BITCOMP_OPEN = "((0 - ".freeze
|
177
|
-
BITCOMP_CLOSE = ") - 1)".freeze
|
178
176
|
BLOB_OPEN = "CAST(X'".freeze
|
179
177
|
BLOB_CLOSE = "' AS BLOB)".freeze
|
180
178
|
HSTAR = "H*".freeze
|
@@ -212,14 +210,10 @@ module Sequel
|
|
212
210
|
|
213
211
|
def complex_expression_sql_append(sql, op, args)
|
214
212
|
case op
|
215
|
-
when
|
216
|
-
sql
|
213
|
+
when :%, :'B~'
|
214
|
+
complex_expression_emulate_append(sql, op, args)
|
217
215
|
when :&, :|, :^, :<<, :>>
|
218
216
|
raise Error, "Derby doesn't support the #{op} operator"
|
219
|
-
when :'B~'
|
220
|
-
sql << BITCOMP_OPEN
|
221
|
-
literal_append(sql, args.at(0))
|
222
|
-
sql << BITCOMP_CLOSE
|
223
217
|
when :extract
|
224
218
|
sql << args.at(0).to_s << PAREN_OPEN
|
225
219
|
literal_append(sql, args.at(1))
|