sequel 3.41.0 → 3.42.0
- data/CHANGELOG +28 -0
- data/README.rdoc +18 -6
- data/Rakefile +45 -40
- data/doc/release_notes/3.42.0.txt +74 -0
- data/lib/sequel/adapters/jdbc/postgresql.rb +55 -0
- data/lib/sequel/adapters/postgres.rb +4 -32
- data/lib/sequel/adapters/shared/mssql.rb +9 -3
- data/lib/sequel/adapters/shared/oracle.rb +5 -0
- data/lib/sequel/adapters/shared/postgres.rb +59 -2
- data/lib/sequel/adapters/shared/sqlite.rb +5 -0
- data/lib/sequel/database/misc.rb +21 -0
- data/lib/sequel/database/query.rb +10 -2
- data/lib/sequel/database/schema_generator.rb +9 -4
- data/lib/sequel/database/schema_methods.rb +18 -4
- data/lib/sequel/dataset/actions.rb +28 -12
- data/lib/sequel/dataset/query.rb +1 -1
- data/lib/sequel/dataset/sql.rb +1 -1
- data/lib/sequel/extensions/schema_dumper.rb +1 -0
- data/lib/sequel/model.rb +2 -3
- data/lib/sequel/model/base.rb +54 -33
- data/lib/sequel/model/dataset_module.rb +30 -0
- data/lib/sequel/plugins/force_encoding.rb +4 -1
- data/lib/sequel/version.rb +1 -1
- data/spec/adapters/postgres_spec.rb +84 -73
- data/spec/core/database_spec.rb +11 -3
- data/spec/core/dataset_spec.rb +22 -0
- data/spec/core/schema_spec.rb +41 -0
- data/spec/extensions/force_encoding_spec.rb +9 -0
- data/spec/integration/schema_test.rb +32 -7
- data/spec/model/base_spec.rb +16 -0
- metadata +5 -2
data/lib/sequel/model/dataset_module.rb
ADDED
@@ -0,0 +1,30 @@
+module Sequel
+  class Model
+    # This Module subclass is used by Model.dataset_module
+    # to add dataset methods to classes.  It adds a couple
+    # of features standard Modules, allowing you to use
+    # the same subset method you can call on Model, as well
+    # as making sure that public methods added to the module
+    # automatically have class methods created for them.
+    class DatasetModule < ::Module
+      # Store the model related to this dataset module.
+      def initialize(model)
+        @model = model
+      end
+
+      # Define a named filter for this dataset, see
+      # Model.subset for details.
+      def subset(name, *args, &block)
+        define_method(name){filter(*args, &block)}
+      end
+
+      private
+
+      # Add a class method to the related model that
+      # calls the dataset method of the same name.
+      def method_added(meth)
+        @model.send(:def_model_dataset_method, meth)
+      end
+    end
+  end
+end
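As a usage sketch (not part of the diff itself): the model, table, and column names below are illustrative and the generated SQL is approximate, but this is the pattern the new class enables through Model.dataset_module and its subset helper, mirrored by the base_spec changes near the end of this diff.

# Hypothetical model; assumes DB is an already-connected Sequel::Database.
class Album < Sequel::Model(:albums)
  dataset_module do
    # DatasetModule#subset defines a named filter, like Model.subset.
    subset :released, :released=>true

    # Public methods added here get matching class methods on Album,
    # via method_added -> def_model_dataset_method.
    def by_name
      order(:name)
    end
  end
end

Album.released.by_name.sql
# => roughly "SELECT * FROM albums WHERE (released IS TRUE) ORDER BY name"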
data/lib/sequel/plugins/force_encoding.rb
CHANGED
@@ -63,7 +63,10 @@ module Sequel
         # Force the encoding of all returned strings to the model's forced_encoding.
         def typecast_value(column, value)
           s = super
-
+          if s.is_a?(String) && (fe = model.forced_encoding)
+            s = s.dup if s.frozen?
+            s.force_encoding(fe)
+          end
           s
         end
       end
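For background on why the dup is needed: String#force_encoding mutates its receiver, so calling it on a frozen string raises. A plain-Ruby illustration (not from the diff):

s = 'blah'.freeze
# s.force_encoding('UTF-8')  # raises: can't modify frozen String
t = s.dup                    # the copy is not frozen
t.force_encoding('UTF-8')    # fine; this is what typecast_value now does for frozen values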
data/lib/sequel/version.rb
CHANGED
@@ -3,7 +3,7 @@ module Sequel
   MAJOR = 3
   # The minor version of Sequel.  Bumped for every non-patch level
   # release, generally around once a month.
-  MINOR = 41
+  MINOR = 42
   # The tiny version of Sequel.  Usually 0, only bumped for bugfix
   # releases that fix regressions from previous versions.
   TINY = 0
data/spec/adapters/postgres_spec.rb
CHANGED
@@ -159,6 +159,15 @@ describe "A PostgreSQL dataset" do
     @db.alter_table(:atest){drop_constraint 'atest_ex'}
   end if POSTGRES_DB.server_version >= 90000

+  specify "should support Database#do for executing anonymous code blocks" do
+    @db.drop_table?(:btest)
+    @db.do "BEGIN EXECUTE 'CREATE TABLE btest (a INTEGER)'; EXECUTE 'INSERT INTO btest VALUES (1)'; END"
+    @db[:btest].select_map(:a).should == [1]
+
+    @db.do "BEGIN EXECUTE 'DROP TABLE btest; CREATE TABLE atest (a INTEGER)'; EXECUTE 'INSERT INTO atest VALUES (1)'; END", :language=>:plpgsql
+    @db[:atest].select_map(:a).should == [1]
+  end if POSTGRES_DB.server_version >= 90000
+
   specify "should support adding foreign key constarints that are not yet valid, and validating them later" do
     @db.create_table!(:atest){primary_key :id; Integer :fk}
     @db[:atest].insert(1, 5)
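A small usage sketch of the Database#do method exercised above, assuming DB is a Sequel PostgreSQL (9.0+) Database; the specs suggest PL/pgSQL is the default language.

# Run an anonymous server-side code block (PostgreSQL DO statement).
DB.do "BEGIN PERFORM pg_sleep(0); END"

# Request a specific procedural language explicitly via :language.
DB.do "BEGIN PERFORM pg_sleep(0); END", :language=>:plpgsql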
@@ -1222,7 +1231,79 @@ if POSTGRES_DB.adapter_scheme == :postgres
   end
 end

-if POSTGRES_DB.adapter_scheme == :postgres && SEQUEL_POSTGRES_USES_PG && POSTGRES_DB.server_version >= 90000
+if ((POSTGRES_DB.adapter_scheme == :postgres && SEQUEL_POSTGRES_USES_PG) || POSTGRES_DB.adapter_scheme == :jdbc) && POSTGRES_DB.server_version >= 90000
+  describe "Postgres::Database#copy_into" do
+    before(:all) do
+      @db = POSTGRES_DB
+      @db.create_table!(:test_copy){Integer :x; Integer :y}
+      @ds = @db[:test_copy].order(:x, :y)
+    end
+    before do
+      @db[:test_copy].delete
+    end
+    after(:all) do
+      @db.drop_table?(:test_copy)
+    end
+
+    specify "should work with a :data option containing data in PostgreSQL text format" do
+      @db.copy_into(:test_copy, :data=>"1\t2\n3\t4\n")
+      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
+    end
+
+    specify "should work with :format=>:csv option and :data option containing data in CSV format" do
+      @db.copy_into(:test_copy, :format=>:csv, :data=>"1,2\n3,4\n")
+      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
+    end
+
+    specify "should respect given :options" do
+      @db.copy_into(:test_copy, :options=>"FORMAT csv, HEADER TRUE", :data=>"x,y\n1,2\n3,4\n")
+      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
+    end
+
+    specify "should respect given :options options when :format is used" do
+      @db.copy_into(:test_copy, :options=>"QUOTE '''', DELIMITER '|'", :format=>:csv, :data=>"'1'|'2'\n'3'|'4'\n")
+      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
+    end
+
+    specify "should accept :columns option to online copy the given columns" do
+      @db.copy_into(:test_copy, :data=>"1\t2\n3\t4\n", :columns=>[:y, :x])
+      @ds.select_map([:x, :y]).should == [[2, 1], [4, 3]]
+    end
+
+    specify "should accept a block and use returned values for the copy in data stream" do
+      buf = ["1\t2\n", "3\t4\n"]
+      @db.copy_into(:test_copy){buf.shift}
+      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
+    end
+
+    specify "should work correctly with a block and :format=>:csv" do
+      buf = ["1,2\n", "3,4\n"]
+      @db.copy_into(:test_copy, :format=>:csv){buf.shift}
+      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
+    end
+
+    specify "should accept an enumerable as the :data option" do
+      @db.copy_into(:test_copy, :data=>["1\t2\n", "3\t4\n"])
+      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
+    end
+
+    specify "should have an exception, cause a rollback of copied data and still have a usable connection" do
+      2.times do
+        sent = false
+        proc{@db.copy_into(:test_copy){raise ArgumentError if sent; sent = true; "1\t2\n"}}.should raise_error(ArgumentError)
+        @ds.select_map([:x, :y]).should == []
+      end
+    end
+
+    specify "should raise an Error if both :data and a block are provided" do
+      proc{@db.copy_into(:test_copy, :data=>["1\t2\n", "3\t4\n"]){}}.should raise_error(Sequel::Error)
+    end
+
+    specify "should raise an Error if neither :data or a block are provided" do
+      proc{@db.copy_into(:test_copy)}.should raise_error(Sequel::Error)
+    end
+  end
+
   describe "Postgres::Database#copy_table" do
     before(:all) do
       @db = POSTGRES_DB
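The call patterns covered by the new describe block, as a sketch. It assumes DB is a PostgreSQL Database on the postgres (pg) or jdbc/postgres adapter and that a two-integer-column table already exists; the table name here is illustrative.

# Bulk load from a string in PostgreSQL text format (tab-separated).
DB.copy_into(:plain_table, :data=>"1\t2\n3\t4\n")

# CSV input with an explicit column order.
DB.copy_into(:plain_table, :format=>:csv, :columns=>[:y, :x], :data=>"2,1\n4,3\n")

# Streaming input: per the specs, the block is called repeatedly for more
# data and the copy finishes when it returns nil.
rows = ["1\t2\n", "3\t4\n"]
DB.copy_into(:plain_table){rows.shift}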
@@ -1293,79 +1374,9 @@ if POSTGRES_DB.adapter_scheme == :postgres && SEQUEL_POSTGRES_USES_PG && POSTGRE
       @db[:test_copy].select_order_map(:x).should == [1, 3]
     end
   end
+end

-
-    before(:all) do
-      @db = POSTGRES_DB
-      @db.create_table!(:test_copy){Integer :x; Integer :y}
-      @ds = @db[:test_copy].order(:x, :y)
-    end
-    before do
-      @db[:test_copy].delete
-    end
-    after(:all) do
-      @db.drop_table?(:test_copy)
-    end
-
-    specify "should work with a :data option containing data in PostgreSQL text format" do
-      @db.copy_into(:test_copy, :data=>"1\t2\n3\t4\n")
-      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
-    end
-
-    specify "should work with :format=>:csv option and :data option containing data in CSV format" do
-      @db.copy_into(:test_copy, :format=>:csv, :data=>"1,2\n3,4\n")
-      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
-    end
-
-    specify "should respect given :options" do
-      @db.copy_into(:test_copy, :options=>"FORMAT csv, HEADER TRUE", :data=>"x,y\n1,2\n3,4\n")
-      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
-    end
-
-    specify "should respect given :options options when :format is used" do
-      @db.copy_into(:test_copy, :options=>"QUOTE '''', DELIMITER '|'", :format=>:csv, :data=>"'1'|'2'\n'3'|'4'\n")
-      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
-    end
-
-    specify "should accept :columns option to online copy the given columns" do
-      @db.copy_into(:test_copy, :data=>"1\t2\n3\t4\n", :columns=>[:y, :x])
-      @ds.select_map([:x, :y]).should == [[2, 1], [4, 3]]
-    end
-
-    specify "should accept a block and use returned values for the copy in data stream" do
-      buf = ["1\t2\n", "3\t4\n"]
-      @db.copy_into(:test_copy){buf.shift}
-      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
-    end
-
-    specify "should work correctly with a block and :format=>:csv" do
-      buf = ["1,2\n", "3,4\n"]
-      @db.copy_into(:test_copy, :format=>:csv){buf.shift}
-      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
-    end
-
-    specify "should accept an enumerable as the :data option" do
-      @db.copy_into(:test_copy, :data=>["1\t2\n", "3\t4\n"])
-      @ds.select_map([:x, :y]).should == [[1, 2], [3, 4]]
-    end
-
-    specify "should have an exception should cause a rollback of copied data and still have a usable connection" do
-      2.times do
-        sent = false
-        proc{@db.copy_into(:test_copy){raise ArgumentError if sent; sent = true; "1\t2\n"}}.should raise_error(ArgumentError)
-        @ds.select_map([:x, :y]).should == []
-      end
-    end
-
-    specify "should raise an Error if both :data and a block are provided" do
-      proc{@db.copy_into(:test_copy, :data=>["1\t2\n", "3\t4\n"]){}}.should raise_error(Sequel::Error)
-    end
-
-    specify "should raise an Error if neither :data or a block are provided" do
-      proc{@db.copy_into(:test_copy)}.should raise_error(Sequel::Error)
-    end
-  end
-
+if POSTGRES_DB.adapter_scheme == :postgres && SEQUEL_POSTGRES_USES_PG && POSTGRES_DB.server_version >= 90000
   describe "Postgres::Database LISTEN/NOTIFY" do
     before(:all) do
       @db = POSTGRES_DB
data/spec/core/database_spec.rb
CHANGED
@@ -22,10 +22,18 @@ describe "A new Database" do
     Sequel::Database.new(1 => 2, :logger => 4, :loggers => 3).loggers.should == [4,3]
     Sequel::Database.new(1 => 2, :logger => [4], :loggers => [3]).loggers.should == [4,3]
   end
+
+  specify "should handle the default string column size" do
+    @db.default_string_column_size.should == 255
+    db = Sequel::Database.new(:default_string_column_size=>50)
+    db.default_string_column_size.should == 50
+    db.default_string_column_size = 2
+    db.default_string_column_size.should == 2
+  end

   specify "should set the sql_log_level from opts[:sql_log_level]" do
-
-
+    Sequel::Database.new(1 => 2, :sql_log_level=>:debug).sql_log_level.should == :debug
+    Sequel::Database.new(1 => 2, :sql_log_level=>'debug').sql_log_level.should == :debug
   end

   specify "should create a connection pool" do
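A sketch of the new default_string_column_size setting exercised by this spec; the Database option and accessor come from the diff, while the closing comment describes the intended effect on schema generation.

db = Sequel::Database.new(:default_string_column_size=>50)
db.default_string_column_size        # => 50 (255 when the option is not given)
db.default_string_column_size = 100  # can also be changed at runtime
# With a real connection, String columns in create_table would now be
# generated as varchar(100) instead of the previously hard-coded varchar(255).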
@@ -675,7 +683,7 @@ shared_examples_for "Database#transaction" do
     end
     e.should_not be_nil
     e.wrapped_exception.should be_a_kind_of(ec)
-    @db.sqls.should == ['BEGIN', 'DROP TABLE test;']
+    @db.sqls.should == ['BEGIN', 'DROP TABLE test;', 'ROLLBACK']
   end

   specify "should handle errors when sending ROLLBACK" do
data/spec/core/dataset_spec.rb
CHANGED
@@ -1885,6 +1885,11 @@ describe "Dataset#count" do
     @db.sqls.should == ["SELECT COUNT(*) AS count FROM (SELECT * FROM test LIMIT 5) AS t1 LIMIT 1"]
   end

+  specify "should work correctly with offsets" do
+    @dataset.limit(nil, 5).count.should == 1
+    @db.sqls.should == ["SELECT COUNT(*) AS count FROM (SELECT * FROM test OFFSET 5) AS t1 LIMIT 1"]
+  end
+
   it "should work on a graphed_dataset" do
     @dataset.should_receive(:columns).twice.and_return([:a])
     @dataset.graph(@dataset, [:a], :table_alias=>:test2).count.should == 1
@@ -2382,6 +2387,13 @@ describe "Dataset aggregate methods" do
     d.min(:a).should == 'SELECT min(a) FROM (SELECT * FROM test ORDER BY a LIMIT 5) AS t1 LIMIT 1'
     d.max(:a).should == 'SELECT max(a) FROM (SELECT * FROM test ORDER BY a LIMIT 5) AS t1 LIMIT 1'
   end
+
+  specify "should accept virtual row blocks" do
+    @d.avg{a(b)}.should == 'SELECT avg(a(b)) FROM test LIMIT 1'
+    @d.sum{a(b)}.should == 'SELECT sum(a(b)) FROM test LIMIT 1'
+    @d.min{a(b)}.should == 'SELECT min(a(b)) FROM test LIMIT 1'
+    @d.max{a(b)}.should == 'SELECT max(a(b)) FROM test LIMIT 1'
+  end
 end

 describe "Dataset#range" do
@@ -2406,6 +2418,11 @@ describe "Dataset#range" do
     @ds.order(:stamp).limit(5).range(:stamp).should == (1..10)
     @db.sqls.should == ['SELECT min(stamp) AS v1, max(stamp) AS v2 FROM (SELECT * FROM test ORDER BY stamp LIMIT 5) AS t1 LIMIT 1']
   end
+
+  specify "should accept virtual row blocks" do
+    @ds.range{a(b)}
+    @db.sqls.should == ["SELECT min(a(b)) AS v1, max(a(b)) AS v2 FROM test LIMIT 1"]
+  end
 end

 describe "Dataset#interval" do
@@ -2426,6 +2443,11 @@ describe "Dataset#interval" do
     @ds.order(:stamp).limit(5).interval(:stamp).should == 1234
     @db.sqls.should == ['SELECT (max(stamp) - min(stamp)) FROM (SELECT * FROM test ORDER BY stamp LIMIT 5) AS t1 LIMIT 1']
   end
+
+  specify "should accept virtual row blocks" do
+    @ds.interval{a(b)}
+    @db.sqls.should == ["SELECT (max(a(b)) - min(a(b))) FROM test LIMIT 1"]
+  end
 end

 describe "Dataset #first and #last" do
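A sketch of the virtual row block support these specs add to the aggregate methods; DB[:test] and the column/function names are illustrative.

# Aggregate methods now take virtual row blocks, so SQL functions can be
# aggregated without building the expression objects by hand.
DB[:test].sum{a(b)}    # issues: SELECT sum(a(b)) FROM test LIMIT 1
DB[:test].avg{a(b)}    # issues: SELECT avg(a(b)) FROM test LIMIT 1
DB[:test].range{a(b)}  # issues: SELECT min(a(b)) AS v1, max(a(b)) AS v2 FROM test LIMIT 1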
data/spec/core/schema_spec.rb
CHANGED
@@ -67,6 +67,20 @@ describe "DB#create_table" do
     @db.sqls.should == ['CREATE TABLE cats (o varchar(255) PRIMARY KEY AUTOINCREMENT, a varchar(255), b integer, c integer, d bigint, e double precision, f numeric, g date, h timestamp, i timestamp, j numeric, k blob, l boolean, m boolean, n integer, p date REFERENCES f)']
   end

+  specify "should transform types given as ruby classes to database-specific types" do
+    @db.default_string_column_size = 50
+    @db.create_table(:cats) do
+      String :a
+      String :a2, :size=>13
+      String :a3, :fixed=>true
+      String :a4, :size=>13, :fixed=>true
+      String :a5, :text=>true
+      varchar :a6
+      varchar :a7, :size=>13
+    end
+    @db.sqls.should == ['CREATE TABLE cats (a varchar(50), a2 varchar(13), a3 char(50), a4 char(13), a5 text, a6 varchar(50), a7 varchar(13))']
+  end
+
   specify "should allow the use of modifiers with ruby class types" do
     @db.create_table(:cats) do
       String :a, :size=>50
@@ -133,6 +147,33 @@ describe "DB#create_table" do
     @db.sqls.should == ["CREATE TABLE cats (id integer, name text UNIQUE)"]
   end

+  specify "should handle not deferred unique constraints" do
+    @db.create_table(:cats) do
+      integer :id
+      text :name
+      unique :name, :deferrable=>false
+    end
+    @db.sqls.should == ["CREATE TABLE cats (id integer, name text, UNIQUE (name) NOT DEFERRABLE)"]
+  end
+
+  specify "should handle deferred unique constraints" do
+    @db.create_table(:cats) do
+      integer :id
+      text :name
+      unique :name, :deferrable=>true
+    end
+    @db.sqls.should == ["CREATE TABLE cats (id integer, name text, UNIQUE (name) DEFERRABLE INITIALLY DEFERRED)"]
+  end
+
+  specify "should handle deferred initially immediate unique constraints" do
+    @db.create_table(:cats) do
+      integer :id
+      text :name
+      unique :name, :deferrable=>:immediate
+    end
+    @db.sqls.should == ["CREATE TABLE cats (id integer, name text, UNIQUE (name) DEFERRABLE INITIALLY IMMEDIATE)"]
+  end
+
   specify "should accept unsigned definition" do
     @db.create_table(:cats) do
       integer :value, :unsigned => true
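A sketch of the :deferrable option on unique constraints that these specs pin down; table and column names are illustrative, and the SQL in the comments is what the specs expect from the default dialect.

DB.create_table(:cats) do
  integer :id
  text :name
  unique :name, :deferrable=>true
  # :deferrable=>true       => UNIQUE (name) DEFERRABLE INITIALLY DEFERRED
  # :deferrable=>:immediate => UNIQUE (name) DEFERRABLE INITIALLY IMMEDIATE
  # :deferrable=>false      => UNIQUE (name) NOT DEFERRABLE
end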
data/spec/extensions/force_encoding_spec.rb
CHANGED
@@ -25,6 +25,15 @@ describe "force_encoding plugin" do
     o.x.encoding.should == @e1
   end

+  specify "should work correctly when given a frozen string" do
+    s = 'blah'
+    s.force_encoding('US-ASCII')
+    s.freeze
+    o = @c.new(:x=>s)
+    o.x.should == 'blah'
+    o.x.encoding.should == @e1
+  end
+
   specify "should have a forced_encoding class accessor" do
     s = 'blah'
     s.force_encoding('US-ASCII')
data/spec/integration/schema_test.rb
CHANGED
@@ -163,20 +163,23 @@ describe "Database index parsing" do
   end

   specify "should parse indexes into a hash" do
+    # Delete :deferrable entry, since not all adapters implement it
+    f = lambda{h = INTEGRATION_DB.indexes(:items); h.values.each{|h2| h2.delete(:deferrable)}; h}
+
     INTEGRATION_DB.create_table!(:items){Integer :n; Integer :a}
-
+    f.call.should == {}
     INTEGRATION_DB.add_index(:items, :n)
-
+    f.call.should == {:items_n_index=>{:columns=>[:n], :unique=>false}}
     INTEGRATION_DB.drop_index(:items, :n)
-
+    f.call.should == {}
     INTEGRATION_DB.add_index(:items, :n, :unique=>true, :name=>:blah_blah_index)
-
+    f.call.should == {:blah_blah_index=>{:columns=>[:n], :unique=>true}}
     INTEGRATION_DB.add_index(:items, [:n, :a])
-
+    f.call.should == {:blah_blah_index=>{:columns=>[:n], :unique=>true}, :items_n_a_index=>{:columns=>[:n, :a], :unique=>false}}
     INTEGRATION_DB.drop_index(:items, :n, :name=>:blah_blah_index)
-
+    f.call.should == {:items_n_a_index=>{:columns=>[:n, :a], :unique=>false}}
     INTEGRATION_DB.drop_index(:items, [:n, :a])
-
+    f.call.should == {}
   end

   specify "should not include a primary key index" do
@@ -619,6 +622,28 @@ describe "Database schema modifiers" do
     @db[:items].insert(:number=>1)
     @db[:items].get(:name).should == 'A13'
   end
+
+  specify "should support deferrable foreign key constraints" do
+    @db.create_table!(:items2){Integer :id, :primary_key=>true}
+    @db.create_table!(:items){foreign_key :id, :items2, :deferrable=>true}
+    proc{@db[:items].insert(1)}.should raise_error(Sequel::DatabaseError)
+    proc{@db.transaction{proc{@db[:items].insert(1)}.should_not raise_error}}.should raise_error(Sequel::DatabaseError)
+  end if INTEGRATION_DB.supports_deferrable_foreign_key_constraints?
+
+  specify "should support deferrable unique constraints when creating or altering tables" do
+    @db.create_table!(:items){Integer :t; unique [:t], :name=>:atest_def, :deferrable=>true, :using=>:btree}
+    @db[:items].insert(1)
+    @db[:items].insert(2)
+    proc{@db[:items].insert(2)}.should raise_error(Sequel::DatabaseError)
+    proc{@db.transaction{proc{@db[:items].insert(2)}.should_not raise_error}}.should raise_error(Sequel::DatabaseError)
+
+    @db.create_table!(:items){Integer :t}
+    @db.alter_table(:items){add_unique_constraint [:t], :name=>:atest_def, :deferrable=>true, :using=>:btree}
+    @db[:items].insert(1)
+    @db[:items].insert(2)
+    proc{@db[:items].insert(2)}.should raise_error(Sequel::DatabaseError)
+    proc{@db.transaction{proc{@db[:items].insert(2)}.should_not raise_error}}.should raise_error(Sequel::DatabaseError)
+  end if INTEGRATION_DB.supports_deferrable_constraints?
 end

 test_tables = begin
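What the deferrable integration specs rely on, spelled out as a sketch (assumes a database that supports deferrable foreign key constraints, such as PostgreSQL): the violation is not raised by an INSERT inside a transaction, only when the transaction commits.

DB.create_table!(:items2){Integer :id, :primary_key=>true}
DB.create_table!(:items){foreign_key :id, :items2, :deferrable=>true}

# Without an explicit transaction the implicit one commits immediately,
# so the insert itself raises Sequel::DatabaseError.

begin
  DB.transaction do
    DB[:items].insert(1)  # succeeds: 1 is not in items2, but the check is deferred
  end                     # Sequel::DatabaseError is raised here, at commit
rescue Sequel::DatabaseError
  # the violation surfaced only when the transaction tried to commit
end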
data/spec/model/base_spec.rb
CHANGED
@@ -182,11 +182,21 @@ describe Sequel::Model, ".dataset_module" do
     @c.dataset.return_3.should == 3
   end

+  it "should also extend the instance_dataset with the module if the model has a dataset" do
+    @c.dataset_module{def return_3() 3 end}
+    @c.instance_dataset.return_3.should == 3
+  end
+
   it "should add methods defined in the module to the class" do
     @c.dataset_module{def return_3() 3 end}
     @c.return_3.should == 3
   end

+  it "should add methods defined in the module outside the block to the class" do
+    @c.dataset_module.module_eval{def return_3() 3 end}
+    @c.return_3.should == 3
+  end
+
   it "should cache calls and readd methods if set_dataset is used" do
     @c.dataset_module{def return_3() 3 end}
     @c.set_dataset :items
@@ -251,6 +261,12 @@ describe Sequel::Model, ".dataset_module" do
     Object.new.extend(@c.dataset_module Module.new{def return_3() 3 end}).return_3.should == 3
   end

+  it "should have dataset_module support a subset method" do
+    @c.dataset_module{subset :released, :released}
+    @c.released.sql.should == 'SELECT * FROM items WHERE released'
+    @c.where(:foo).released.sql.should == 'SELECT * FROM items WHERE (foo AND released)'
+  end
+
   it "should raise error if called with both an argument and ablock" do
     proc{@c.dataset_module(Module.new{def return_3() 3 end}){}}.should raise_error(Sequel::Error)
   end