sequel_core 1.2 → 1.2.1
This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- data/CHANGELOG +20 -0
- data/Rakefile +1 -1
- data/lib/sequel_core/adapters/mysql.rb +50 -2
- data/lib/sequel_core/array_keys.rb +22 -1
- data/lib/sequel_core/database.rb +3 -0
- data/lib/sequel_core/dataset.rb +14 -11
- data/lib/sequel_core/dataset/convenience.rb +18 -0
- data/lib/sequel_core/dataset/sql.rb +2 -1
- data/lib/sequel_core/schema/schema_generator.rb +16 -0
- data/lib/sequel_core/schema/schema_sql.rb +4 -0
- data/spec/adapters/mysql_spec.rb +24 -0
- data/spec/array_keys_spec.rb +3 -0
- data/spec/database_spec.rb +9 -0
- data/spec/dataset_spec.rb +55 -0
- data/spec/schema_generator_spec.rb +6 -2
- data/spec/schema_generator_spec.rb.rej +35 -0
- data/spec/schema_spec.rb +27 -0
- data/spec/schema_spec.rb.rej +36 -0
- metadata +4 -2
data/CHANGELOG
CHANGED
@@ -1,3 +1,23 @@
+=== 1.2.1 (2008-02-29)
+
+* Added add_constraint and drop_constraint functionality to Database#alter_table (#182).
+
+* Enhanced Dataset#multi_insert to accept datasets (#179).
+
+* Added MySQL::Database#use method for switching database (#180).
+
+* Enhanced Database.uri_to_options to accept uri strings (#178).
+
+* Added Dataset#columns! method that always makes a roundtrip to the DB (#177).
+
+* Added new Dataset#each_page method that iterates over all pages in the result set (#175).
+
+* Added Dataset#reverse alias to Dataset#reverse_order (#174).
+
+* Fixed Dataset#transform_load and #transform_save to create a trasnformed copy of the supplied hash instead of transforming it in place (#184).
+
+* Implemented MySQL::Dataset#replace (#163).
+
 === 1.2 (2008-02-15)
 
 * Added support for :varchar[100] like type declarations in #create_table.
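For orientation, a minimal usage sketch of the headline change, constraint support in Database#alter_table, based on the specs added later in this diff. DB stands for an open Sequel database handle; the table and constraint names are illustrative:

    DB.alter_table(:cats) do
      # CHECK constraint from a SQL string:
      #   ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100)
      add_constraint :valid_score, 'score <= 100'
      # CHECK constraint from a block (per the added specs):
      #   ALTER TABLE cats ADD CONSTRAINT blah_blah CHECK (((x > 0) AND (y < 1)))
      add_constraint(:blah_blah) {:x > 0 && :y < 1}
      # Drop an existing constraint by name:
      #   ALTER TABLE cats DROP CONSTRAINT old_check
      drop_constraint :old_check
    end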
data/Rakefile
CHANGED
@@ -9,7 +9,7 @@ include FileUtils
 # Configuration
 ##############################################################################
 NAME = "sequel_core"
-VERS = "1.2"
+VERS = "1.2.1"
 CLEAN.include ["**/.*.sw?", "pkg/*", ".config", "doc/*", "coverage/*"]
 RDOC_OPTS = [
 "--quiet",
data/lib/sequel_core/adapters/mysql.rb
CHANGED
@@ -204,6 +204,14 @@ module Sequel
 end
 end
 end
+
+# Changes the database in use by issuing a USE statement.
+def use(db_name)
+  disconnect
+  @opts[:database] = db_name
+  self << "USE #{db_name}"
+  self
+end
 end
 
 class Dataset < Sequel::Dataset
@@ -314,7 +322,7 @@ module Sequel
 end
 
 # MySQL supports ORDER and LIMIT clauses in UPDATE statements.
-def update_sql(values, opts = nil)
+def update_sql(values, opts = nil, &block)
 sql = super
 opts = opts ? @opts.merge(opts) : @opts
 
@@ -328,6 +336,42 @@ module Sequel
 sql
 end
 
+def replace_sql(*values)
+  if values.empty?
+    "REPLACE INTO #{@opts[:from]} DEFAULT VALUES"
+  else
+    values = values[0] if values.size == 1
+    case values
+    when Sequel::Model
+      insert_sql(values.values)
+    when Array
+      if values.empty?
+        "REPLACE INTO #{@opts[:from]} DEFAULT VALUES"
+      elsif values.keys
+        fl = values.keys.map {|f| literal(f.is_a?(String) ? f.to_sym : f)}
+        vl = @transform ? transform_save(values.values) : values.values
+        vl.map! {|v| literal(v)}
+        "REPLACE INTO #{@opts[:from]} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})"
+      else
+        "REPLACE INTO #{@opts[:from]} VALUES (#{literal(values)})"
+      end
+    when Hash
+      values = transform_save(values) if @transform
+      if values.empty?
+        "REPLACE INTO #{@opts[:from]} DEFAULT VALUES"
+      else
+        fl, vl = [], []
+        values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}
+        "REPLACE INTO #{@opts[:from]} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})"
+      end
+    when Dataset
+      "REPLACE INTO #{@opts[:from]} #{literal(values)}"
+    else
+      "REPLACE INTO #{@opts[:from]} VALUES (#{literal(values)})"
+    end
+  end
+end
+
 # MySQL supports ORDER and LIMIT clauses in DELETE statements.
 def delete_sql(opts = nil)
 sql = super
@@ -350,7 +394,11 @@ module Sequel
 def update(*args, &block)
 @db.execute(update_sql(*args, &block)) {|c| c.affected_rows}
 end
-
+
+def replace(*args)
+  @db.execute(replace_sql(*args)) {|c| c.insert_id}
+end
+
 def delete(opts = nil)
 @db.execute(delete_sql(opts)) {|c| c.affected_rows}
 end
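A hedged usage sketch of the two MySQL-specific additions above. DB stands for an already-connected Sequel MySQL database; the database, table, and column names are illustrative:

    # Switch the connection to another database; issues "USE my_other_db"
    # after disconnecting, so subsequent queries run against it.
    DB.use(:my_other_db)

    # Emits REPLACE INTO items (id, value) VALUES (111, 333): the row is
    # inserted, or an existing row with the same unique key is overwritten.
    DB[:items].replace(:id => 111, :value => 333)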
data/lib/sequel_core/array_keys.rb
CHANGED
@@ -62,7 +62,7 @@ module ArrayKeys
 # Converts the array into a hash.
 def to_hash
 h = {}
-each_with_index {|v, i| h[@keys[i].to_sym] = v}
+each_with_index {|v, i| h[(k = @keys[i]) ? k.to_sym : nil] = v}
 h
 end
 alias_method :to_h, :to_hash
@@ -238,6 +238,23 @@ module ArrayKeys
 end
 end
 end
+
+def array_tuples_transform_load(r)
+  a = []; a.keys = []
+  r.each_pair do |k, v|
+    a[k] = (tt = @transform[k]) ? tt[0][v] : v
+  end
+  a
+end
+
+# Applies the value transform for data saved to the database.
+def array_tuples_transform_save(r)
+  a = []; a.keys = []
+  r.each_pair do |k, v|
+    a[k] = (tt = @transform[k]) ? tt[1][v] : v
+  end
+  a
+end
 end
 end
 
@@ -266,9 +283,13 @@ module Sequel
 else
 alias_method :orig_each, :each
 alias_method :orig_update_each_method, :update_each_method
+
 include ArrayKeys::DatasetExtensions
 alias_method :each, :array_tuples_each
 alias_method :update_each_method, :array_tuples_update_each_method
+
+alias_method :transform_load, :array_tuples_transform_load
+alias_method :transform_save, :array_tuples_transform_save
 end
 end
 end
data/lib/sequel_core/database.rb
CHANGED
@@ -379,6 +379,9 @@ module Sequel
 # Converts a uri to an options hash. These options are then passed
 # to a newly created database object.
 def self.uri_to_options(uri)
+  if uri.is_a?(String)
+    uri = URI.parse(uri)
+  end
 {
 :user => uri.user,
 :password => uri.password,
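As exercised by the new spec in database_spec.rb further down, uri_to_options now accepts a plain string as well as a URI object. A minimal sketch (the scheme, host, and credentials are illustrative):

    # Both calls now return the same options hash:
    # {:user => 'uuu', :password => 'ppp', :host => '192.168.60.1', :port => 1234, :database => 'blah'}
    Sequel::Database.uri_to_options('ttt://uuu:ppp@192.168.60.1:1234/blah')
    Sequel::Database.uri_to_options(URI.parse('ttt://uuu:ppp@192.168.60.1:1234/blah'))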
data/lib/sequel_core/dataset.rb
CHANGED
@@ -70,7 +70,7 @@ module Sequel
 include SQL
 include Convenience
 
-
+attr_accessor :db
 attr_accessor :opts
 
 alias_method :size, :count
@@ -159,6 +159,11 @@ module Sequel
 @columns || []
 end
 
+def columns!
+  first
+  @columns || []
+end
+
 # Inserts the supplied values into the associated table.
 def <<(*args)
 insert(*args)
@@ -352,22 +357,20 @@ module Sequel
 
 # Applies the value transform for data loaded from the database.
 def transform_load(r)
-
-
-
-
+  r.inject({}) do |m, kv|
+    k, v = *kv
+    m[k] = (tt = @transform[k]) ? tt[0][v] : v
+    m
 end
-r
 end
 
 # Applies the value transform for data saved to the database.
 def transform_save(r)
-
-
-
-
+  r.inject({}) do |m, kv|
+    k, v = *kv
+    m[k] = (tt = @transform[k]) ? tt[1][v] : v
+    m
 end
-r
 end
 
 # Updates the each method according to whether @row_proc and @transform are
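Two user-visible effects of the dataset changes above, sketched on the assumption that Dataset#transform accepts a column mapped to a [load_proc, save_proc] pair (the Marshal transform and the :items table are illustrative):

    ds = DB[:items]
    ds.transform(:obj => [proc {|v| Marshal.load(v)}, proc {|v| Marshal.dump(v)}])

    # transform_save now works on a copy, so the caller's hash is left intact (#184).
    row = {:obj => [1, 2, 3]}
    ds.insert(row)
    row[:obj]     # still [1, 2, 3], not the marshalled string

    # columns! always makes a roundtrip to the DB, unlike columns, which may
    # return a cached column list (#177).
    ds.columns!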
data/lib/sequel_core/dataset/convenience.rb
CHANGED
@@ -123,6 +123,19 @@ module Sequel
 @pagination_record_count = record_count
 @page_count = (record_count / page_size.to_f).ceil
 end
+
+def each_page(page_size)
+  record_count = count
+  total_pages = (record_count / page_size.to_f).ceil
+
+  (1..total_pages).each do |page_no|
+    paginated = limit(page_size, (page_no - 1) * page_size)
+    paginated.set_pagination_info(page_no, page_size, record_count)
+    yield paginated
+  end
+
+  self
+end
 
 attr_accessor :page_size, :page_count, :current_page, :pagination_record_count
 
@@ -246,6 +259,11 @@ module Sequel
 return
 elsif args[0].is_a?(Array) && args[1].is_a?(Array)
 columns, values, opts = *args
+elsif args[0].is_a?(Array) && args[1].is_a?(Dataset)
+  table = @opts[:from].first
+  columns, dataset = *args
+  sql = "INSERT INTO #{table} (#{literal(columns)}) VALUES (#{dataset.sql})"
+  return @db.transaction {@db.execute sql}
 else
 # we assume that an array of hashes is given
 hashes, opts = *args
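A usage sketch of the two convenience additions above, based on the specs added later in this diff (table and column names are illustrative):

    # Iterate over the whole result set one page at a time; each block argument
    # is a paginated dataset (LIMIT 50 OFFSET 0, then 50, 100, ...).
    DB[:items].each_page(50) do |page|
      page.each {|row| puts row}
    end

    # multi_insert can now take a dataset as the source of values; the generated
    # INSERT ... VALUES (SELECT ...) statement is wrapped in a transaction.
    DB[:items].multi_insert([:x, :y], DB[:cats].filter(:purr => true).select(:a, :b))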
data/lib/sequel_core/dataset/sql.rb
CHANGED
@@ -214,6 +214,7 @@ module Sequel
 def reverse_order(*order)
 order(*invert_order(order.empty? ? @opts[:order] : order))
 end
+alias_method :reverse, :reverse_order
 
 # Inverts the given order by breaking it into a list of column references
 # and inverting them.
@@ -544,7 +545,7 @@ module Sequel
 #
 # This method may be overriden by descendants.
 def multi_insert_sql(columns, values)
-table = @opts[:from]
+table = @opts[:from].first
 columns = literal(columns)
 values.map do |r|
 "INSERT INTO #{table} (#{columns}) VALUES (#{literal(r)})"
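The new alias is a small convenience; a one-line sketch against an illustrative test table:

    DB[:test].order(:name).reverse.sql   # => "SELECT * FROM test ORDER BY name DESC"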
data/lib/sequel_core/schema/schema_generator.rb
CHANGED
@@ -147,6 +147,22 @@ module Sequel
 :columns => columns \
 }
 end
+
+def add_constraint(name, *args, &block)
+  @operations << { \
+    :op => :add_constraint, \
+    :name => name, \
+    :type => :check, \
+    :check => block || args \
+  }
+end
+
+def drop_constraint(name)
+  @operations << { \
+    :op => :drop_constraint, \
+    :name => name \
+  }
+end
 end
 end
 end
data/lib/sequel_core/schema/schema_sql.rb
CHANGED
@@ -143,6 +143,10 @@ module Sequel
 index_definition_sql(table, op)
 when :drop_index
 "DROP INDEX #{default_index_name(table, op[:columns])}"
+when :add_constraint
+  "ALTER TABLE #{table} ADD #{constraint_definition_sql(op)}"
+when :drop_constraint
+  "ALTER TABLE #{table} DROP CONSTRAINT #{literal(op[:name])}"
 else
 raise Error, "Unsupported ALTER TABLE operation"
 end
data/spec/adapters/mysql_spec.rb
CHANGED
@@ -563,4 +563,28 @@ context "MySQL::Dataset#multi_insert" do
 {:name => 'def', :value => 2}
 ]
 end
+end
+
+context "MySQL::Dataset#replace" do
+  setup do
+    MYSQL_DB.drop_table(:items) if MYSQL_DB.table_exists?(:items)
+    MYSQL_DB.create_table :items do
+      integer :id, :unique => true
+      integer :value, :index => true
+    end
+    @d = MYSQL_DB[:items]
+    MYSQL_DB.sqls.clear
+  end
+
+  specify "should create a record if the condition is not met" do
+    @d.replace(:id => 111, :value => 333)
+    @d.all.should == [{:id => 111, :value => 333}]
+  end
+
+  specify "should update a record if the condition is met" do
+    @d << {:id => 111}
+    @d.all.should == [{:id => 111, :value => nil}]
+    @d.replace(:id => 111, :value => 333)
+    @d.all.should == [{:id => 111, :value => 333}]
+  end
 end
data/spec/array_keys_spec.rb
CHANGED
@@ -552,6 +552,9 @@ context "Sequel.use_array_tuples" do
 a[:b].should == 2
 a[:c].should == 3
 a[:d].should == nil
+
+b = [12]; b.keys = [:a]
+@ds.insert_sql(b).should == "INSERT INTO items (a) VALUES (2)"
 end
 
 specify "should work correctly with dataset with model" do
data/spec/database_spec.rb
CHANGED
@@ -579,6 +579,15 @@ context "Database#uri_to_options" do
 h[:port].should == 1234
 h[:database].should == 'blah'
 end
+
+specify "should accept a string and convert it to an options hash" do
+  h = Sequel::Database.uri_to_options('ttt://uuu:ppp@192.168.60.1:1234/blah')
+  h[:user].should == 'uuu'
+  h[:password].should == 'ppp'
+  h[:host].should == '192.168.60.1'
+  h[:port].should == 1234
+  h[:database].should == 'blah'
+end
 end
 
 context "A single threaded database" do
data/spec/dataset_spec.rb
CHANGED
@@ -854,6 +854,11 @@ context "Dataset#reverse_order" do
 @dataset.order(:clumsy.DESC, :fool).reverse_order.sql.should ==
 'SELECT * FROM test ORDER BY clumsy, fool DESC'
 end
+
+specify "should have #reverse alias" do
+  @dataset.order(:name).reverse.sql.should ==
+    'SELECT * FROM test ORDER BY name DESC'
+end
 end
 
 context "Dataset#limit" do
@@ -2025,6 +2030,24 @@ context "A paginated dataset" do
 end
 end
 
+context "Dataset#each_page" do
+  setup do
+    @d = Sequel::Dataset.new(nil).from(:items)
+    @d.meta_def(:count) {153}
+  end
+
+  specify "should iterate over each page in the resultset as a paginated dataset" do
+    a = []
+    @d.each_page(50) {|p| a << p}
+    a.map {|p| p.sql}.should == [
+      'SELECT * FROM items LIMIT 50 OFFSET 0',
+      'SELECT * FROM items LIMIT 50 OFFSET 50',
+      'SELECT * FROM items LIMIT 50 OFFSET 100',
+      'SELECT * FROM items LIMIT 50 OFFSET 150',
+    ]
+  end
+end
+
 context "Dataset#columns" do
 setup do
 @dataset = DummyDataset.new(nil).from(:items)
@@ -2045,6 +2068,21 @@ context "Dataset#columns" do
 end
 end
 
+context "Dataset#columns!" do
+  setup do
+    @dataset = DummyDataset.new(nil).from(:items)
+    @dataset.meta_def(:columns=) {|c| @columns = c}
+    @dataset.meta_def(:first) {@columns = select_sql(nil)}
+  end
+
+  specify "should always call first" do
+    @dataset.columns = nil
+    @dataset.columns!.should == 'SELECT * FROM items'
+    @dataset.opts[:from] = [:nana]
+    @dataset.columns!.should == 'SELECT * FROM nana'
+  end
+end
+
 require 'stringio'
 
 context "Dataset#print" do
@@ -2146,6 +2184,17 @@ context "Dataset#multi_insert" do
 ]
 end
 
+specify "should accept a columns array and a dataset" do
+  @ds2 = Sequel::Dataset.new(@db).from(:cats).filter(:purr => true).select(:a, :b)
+
+  @ds.multi_insert([:x, :y], @ds2)
+  @db.sqls.should == [
+    'BEGIN',
+    "INSERT INTO items (x, y) VALUES (SELECT a, b FROM cats WHERE (purr = 't'))",
+    'COMMIT'
+  ]
+end
+
 specify "should accept a columns array and a values array with slice option" do
 @ds.multi_insert([:x, :y], [[1, 2], [3, 4], [5, 6]], :slice => 2)
 @db.sqls.should == [
@@ -2385,6 +2434,12 @@ context "Dataset#transform" do
 @ds.each(:naked => true) {|r| f = r}
 f.should == {:x => "wow", :y => 'hello'}
 end
+
+specify "should leave the supplied values intact" do
+  h = {:x => :toast}
+  @ds.insert(h)
+  h.should == {:x => :toast}
+end
 end
 
 context "Dataset#transform" do
data/spec/schema_generator_spec.rb
CHANGED
@@ -82,6 +82,8 @@ describe Sequel::Schema::AlterTableGenerator do
 add_index [:fff, :ggg]
 drop_index :hhh
 add_full_text_index :blah
+add_constraint :con1, ':fred > 100'
+drop_constraint :con2
 end
 end
 
@@ -94,7 +96,9 @@ describe Sequel::Schema::AlterTableGenerator do
 {:op => :set_column_default, :name => :eee, :default => 1},
 {:op => :add_index, :columns => [:fff, :ggg]},
 {:op => :drop_index, :columns => [:hhh]},
-{:op => :add_index, :columns => [:blah], :full_text => true}
+{:op => :add_index, :columns => [:blah], :full_text => true},
+{:op => :add_constraint, :type => :check, :name => :con1, :check => [':fred > 100']},
+{:op => :drop_constraint, :name => :con2}
 ]
 end
-end
+end
data/spec/schema_generator_spec.rb.rej
ADDED
@@ -0,0 +1,35 @@
+***************
+*** 82,87 ****
+  add_index [:fff, :ggg]
+  drop_index :hhh
+  add_full_text_index :blah
+  end
+  end
+
+--- 82,89 ----
+  add_index [:fff, :ggg]
+  drop_index :hhh
+  add_full_text_index :blah
++ add_constraint :con1, ':fred > 100'
++ drop_constraint :con2
+  end
+  end
+
+***************
+*** 94,100 ****
+  {:op => :set_column_default, :name => :eee, :default => 1},
+  {:op => :add_index, :columns => [:fff, :ggg]},
+  {:op => :drop_index, :columns => [:hhh]},
+- {:op => :add_index, :columns => [:blah], :full_text => true}
+  ]
+  end
+- end--- 96,104 ----
+  {:op => :set_column_default, :name => :eee, :default => 1},
+  {:op => :add_index, :columns => [:fff, :ggg]},
+  {:op => :drop_index, :columns => [:hhh]},
++ {:op => :add_index, :columns => [:blah], :full_text => true},
++ {:op => :add_constraint, :type => :check, :name => :con1, :check => [':fred > 100']},
++ {:op => :drop_constraint, :name => :con2}
+  ]
+  end
++ end
data/spec/schema_spec.rb
CHANGED
@@ -279,3 +279,30 @@ context "DB#drop_table" do
 @db.sqls.should == ['DROP TABLE cats']
 end
 end
+
+context "DB#alter_table" do
+  setup do
+    @db = SchemaDummyDatabase.new
+  end
+
+  specify "should accept add constraint definitions" do
+    @db.alter_table(:cats) do
+      add_constraint :valid_score, 'score <= 100'
+    end
+    @db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100)"]
+    @db.sqls.clear
+
+    @db.alter_table(:cats) do
+      add_constraint(:blah_blah) {:x > 0 && :y < 1}
+    end
+    @db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT blah_blah CHECK (((x > 0) AND (y < 1)))"]
+  end
+
+  specify "should accept drop constraint definitions" do
+    @db.alter_table(:cats) do
+      drop_constraint :valid_score
+    end
+    @db.sqls.should == ["ALTER TABLE cats DROP CONSTRAINT valid_score"]
+  end
+
+end
data/spec/schema_spec.rb.rej
ADDED
@@ -0,0 +1,36 @@
+***************
+*** 279,281 ****
+  @db.sqls.should == ['DROP TABLE cats']
+  end
+  end
+--- 279,308 ----
+  @db.sqls.should == ['DROP TABLE cats']
+  end
+  end
++
++ context "DB#alter_table" do
++ setup do
++ @db = SchemaDummyDatabase.new
++ end
++
++ specify "should accept add constraint definitions" do
++ @db.alter_table(:cats) do
++ add_constraint :valid_score, 'score <= 100'
++ end
++ @db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT valid_score CHECK (score <= 100)"]
++ @db.sqls.clear
++
++ @db.alter_table(:cats) do
++ add_constraint(:blah_blah) {:x > 0 && :y < 1}
++ end
++ @db.sqls.should == ["ALTER TABLE cats ADD CONSTRAINT blah_blah CHECK (((x > 0) AND (y < 1)))"]
++ end
++
++ specify "should accept drop constraint definitions" do
++ @db.alter_table(:cats) do
++ drop_constraint :valid_score
++ end
++ @db.sqls.should == ["ALTER TABLE cats DROP CONSTRAINT valid_score"]
++ end
++
++ end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: sequel_core
 version: !ruby/object:Gem::Version
-version:
+version: 1.2.1
 platform: ruby
 authors:
 - Sharon Rosner
@@ -9,7 +9,7 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2008-02-
+date: 2008-02-29 00:00:00 +02:00
 default_executable:
 dependencies:
 - !ruby/object:Gem::Dependency
@@ -87,7 +87,9 @@ files:
 - spec/pretty_table_spec.rb
 - spec/rcov.opts
 - spec/schema_generator_spec.rb
+- spec/schema_generator_spec.rb.rej
 - spec/schema_spec.rb
+- spec/schema_spec.rb.rej
 - spec/sequelizer_spec.rb
 - spec/spec.opts
 - spec/spec_helper.rb