sequel 2.4.0 → 2.5.0

Files changed (38)
  1. data/CHANGELOG +34 -0
  2. data/Rakefile +1 -1
  3. data/lib/sequel_core.rb +16 -7
  4. data/lib/sequel_core/adapters/ado.rb +6 -2
  5. data/lib/sequel_core/adapters/db2.rb +1 -1
  6. data/lib/sequel_core/adapters/jdbc.rb +2 -2
  7. data/lib/sequel_core/adapters/jdbc/postgresql.rb +22 -10
  8. data/lib/sequel_core/adapters/mysql.rb +2 -2
  9. data/lib/sequel_core/adapters/odbc.rb +6 -2
  10. data/lib/sequel_core/adapters/postgres.rb +25 -14
  11. data/lib/sequel_core/adapters/shared/mysql.rb +15 -35
  12. data/lib/sequel_core/adapters/shared/postgres.rb +137 -77
  13. data/lib/sequel_core/adapters/sqlite.rb +2 -2
  14. data/lib/sequel_core/core_ext.rb +11 -7
  15. data/lib/sequel_core/database.rb +18 -1
  16. data/lib/sequel_core/dataset.rb +23 -7
  17. data/lib/sequel_core/dataset/convenience.rb +1 -1
  18. data/lib/sequel_core/dataset/sql.rb +46 -31
  19. data/lib/sequel_core/exceptions.rb +4 -0
  20. data/lib/sequel_core/schema/generator.rb +43 -3
  21. data/lib/sequel_core/schema/sql.rb +52 -26
  22. data/lib/sequel_model.rb +2 -5
  23. data/lib/sequel_model/associations.rb +3 -3
  24. data/lib/sequel_model/base.rb +19 -13
  25. data/lib/sequel_model/record.rb +19 -11
  26. data/lib/sequel_model/schema.rb +10 -4
  27. data/lib/sequel_model/validations.rb +20 -7
  28. data/spec/adapters/mysql_spec.rb +1 -1
  29. data/spec/adapters/postgres_spec.rb +64 -9
  30. data/spec/integration/dataset_test.rb +32 -0
  31. data/spec/sequel_core/core_sql_spec.rb +38 -0
  32. data/spec/sequel_core/database_spec.rb +16 -1
  33. data/spec/sequel_core/dataset_spec.rb +66 -1
  34. data/spec/sequel_core/schema_generator_spec.rb +23 -3
  35. data/spec/sequel_core/schema_spec.rb +175 -4
  36. data/spec/sequel_model/record_spec.rb +47 -0
  37. data/spec/sequel_model/validations_spec.rb +70 -0
  38. metadata +2 -2
data/lib/sequel_core/adapters/sqlite.rb

@@ -78,7 +78,7 @@ module Sequel
  conn.transaction{result = yield(conn)}
  result
  rescue ::Exception => e
- raise (SQLite3::Exception === e ? Error.new(e.message) : e) unless Error::Rollback === e
+ transaction_error(e, SQLite3::Exception)
  end
  end
  end
@@ -92,7 +92,7 @@ module Sequel
  log_info(sql, opts[:arguments])
  synchronize(opts[:server]){|conn| yield conn}
  rescue SQLite3::Exception => e
- raise Error::InvalidStatement, "#{sql}\r\n#{e.message}"
+ raise_error(e)
  end
  end

data/lib/sequel_core/core_ext.rb

@@ -160,18 +160,18 @@ class String
  # Converts a string into a Date object.
  def to_date
  begin
- Date.parse(self)
+ Date.parse(self, Sequel.convert_two_digit_years)
  rescue => e
- raise Sequel::Error::InvalidValue, "Invalid date value '#{self}' (#{e.message})"
+ raise Sequel::Error::InvalidValue, "Invalid Date value '#{self}' (#{e.message})"
  end
  end

  # Converts a string into a DateTime object.
  def to_datetime
  begin
- DateTime.parse(self)
+ DateTime.parse(self, Sequel.convert_two_digit_years)
  rescue => e
- raise Sequel::Error::InvalidValue, "Invalid date value '#{self}' (#{e.message})"
+ raise Sequel::Error::InvalidValue, "Invalid DateTime value '#{self}' (#{e.message})"
  end
  end

@@ -179,9 +179,13 @@ class String
  # value of Sequel.datetime_class
  def to_sequel_time
  begin
- Sequel.datetime_class.parse(self)
+ if Sequel.datetime_class == DateTime
+ DateTime.parse(self, Sequel.convert_two_digit_years)
+ else
+ Sequel.datetime_class.parse(self)
+ end
  rescue => e
- raise Sequel::Error::InvalidValue, "Invalid time value '#{self}' (#{e.message})"
+ raise Sequel::Error::InvalidValue, "Invalid #{Sequel.datetime_class} value '#{self}' (#{e.message})"
  end
  end

@@ -190,7 +194,7 @@ class String
  begin
  Time.parse(self)
  rescue => e
- raise Sequel::Error::InvalidValue, "Invalid time value '#{self}' (#{e.message})"
+ raise Sequel::Error::InvalidValue, "Invalid Time value '#{self}' (#{e.message})"
  end
  end
  end
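Note: the second argument now passed to Date.parse/DateTime.parse is the new Sequel.convert_two_digit_years accessor (presumably added by the sequel_core.rb changes listed above). A rough sketch of the effect; the values are illustrative, parsing itself is still done by Ruby's date library:

  Sequel.convert_two_digit_years = true   # expand "03" to 2003 (Date.parse's comp flag)
  "03-10-21".to_date                      # => 2003-10-21
  Sequel.convert_two_digit_years = false  # leave two-digit years untouched
  "03-10-21".to_date                      # => 0003-10-21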
data/lib/sequel_core/database.rb

@@ -371,7 +371,7 @@ module Sequel
  rescue Exception => e
  log_info(SQL_ROLLBACK)
  conn.execute(SQL_ROLLBACK)
- raise e unless Error::Rollback === e
+ transaction_error(e)
  ensure
  unless e
  log_info(SQL_COMMIT)
@@ -495,6 +495,23 @@ module Sequel
  def connection_pool_default_options
  {}
  end
+
+ # Convert the given exception to a DatabaseError, keeping message
+ # and traceback.
+ def raise_error(exception, opts={})
+ if !opts[:classes] || exception.is_one_of?(*opts[:classes])
+ e = DatabaseError.new("#{exception.class} #{exception.message}")
+ e.set_backtrace(exception.backtrace)
+ raise e
+ else
+ raise exception
+ end
+ end
+
+ # Raise a database error unless the exception is an Error::Rollback.
+ def transaction_error(e, *classes)
+ raise_error(e, :classes=>classes) unless Error::Rollback === e
+ end
  end
  end
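These two helpers centralize the error wrapping that each adapter previously did by hand (compare the sqlite.rb hunks above). A minimal sketch of how an adapter's execute method can use them; MyDriverError is a hypothetical stand-in for the driver's real exception class:

  # Hypothetical adapter code; MyDriverError is not a real class.
  def execute(sql, opts = {})
    log_info(sql)
    synchronize(opts[:server]){|conn| conn.execute(sql)}
  rescue MyDriverError => e
    raise_error(e, :classes=>[MyDriverError])  # re-raised as Sequel::DatabaseError
  end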

data/lib/sequel_core/dataset.rb

@@ -52,7 +52,8 @@ module Sequel
  group group_and_count group_by having inner_join intersect invert join
  left_outer_join limit naked or order order_by order_more paginate query reject
  reverse reverse_order right_outer_join select select_all select_more
- set_graph_aliases set_model sort sort_by unfiltered union unordered where'.collect{|x| x.to_sym}
+ set_defaults set_graph_aliases set_model set_overrides sort sort_by
+ unfiltered union unordered where'.collect{|x| x.to_sym}

  NOTIMPL_MSG = "This method must be overridden in Sequel adapters".freeze
  STOCK_TRANSFORMS = {
@@ -183,14 +184,15 @@ module Sequel
  end
  end

- # Deletes the records in the dataset. Adapters should override this method.
+ # Deletes the records in the dataset. The returned value is generally the
+ # number of records deleted, but that is adapter dependent.
  def delete(*args)
  execute_dui(delete_sql(*args))
  end

  # Iterates over the records in the dataset.
  def each(opts = nil, &block)
- if graph = @opts[:graph]
+ if @opts[:graph] and !(opts && opts[:graph] == false)
  graph_each(opts, &block)
  else
  row_proc = @row_proc unless opts && opts[:naked]
@@ -205,13 +207,14 @@ module Sequel
  end
  end

  # Executes a select query and fetches records, passing each record to the
- # supplied block. Adapters should override this method.
+ # supplied block. The yielded records are generally hashes with symbol keys,
+ # but that is adapter dependent.
  def fetch_rows(sql, &block)
  raise NotImplementedError, NOTIMPL_MSG
  end

- # Inserts values into the associated table. Adapters should override this
- # method.
+ # Inserts values into the associated table. The returned value is generally
+ # the value of the primary key for the inserted row, but that is adapter dependent.
  def insert(*values)
  execute_dui(insert_sql(*values))
  end
@@ -260,6 +263,12 @@ module Sequel
  update(*args)
  end

+ # Set the default values for insert and update statements. The values passed
+ # to insert or update are merged into this hash.
+ def set_defaults(hash)
+ clone(:defaults=>(@opts[:defaults]||{}).merge(hash))
+ end
+
  # Associates or disassociates the dataset with a model(s). If
  # nil is specified, the dataset is turned into a naked dataset and returns
  # records as hashes. If a model class specified, the dataset is modified
@@ -342,6 +351,12 @@ module Sequel
  self
  end

+ # Set values that override hash arguments given to insert and update statements.
+ # This hash is merged into the hash provided to insert or update.
+ def set_overrides(hash)
+ clone(:overrides=>hash.merge(@opts[:overrides]||{}))
+ end
+
  # Sets a value transform which is used to convert values loaded and saved
  # to/from the database. The transform should be supplied as a hash. Each
  # value in the hash should be an array containing two proc objects - one
@@ -399,7 +414,8 @@ module Sequel
  end
  end

- # Updates values for the dataset. Adapters should override this method.
+ # Updates values for the dataset. The returned value is generally the
+ # number of rows updated, but that is adapter dependent.
  def update(*args)
  execute_dui(update_sql(*args))
  end
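Taken together with the insert_sql/update_sql changes further down, set_defaults and set_overrides behave roughly like this (a sketch; column order and identifier quoting depend on the adapter):

  ds = DB[:items].set_defaults(:a=>1).set_overrides(:b=>2)
  ds.insert_sql                  # => "INSERT INTO items (a, b) VALUES (1, 2)"
  ds.insert_sql(:a=>10, :b=>20)  # passed values beat defaults, overrides win:
                                 #    "INSERT INTO items (a, b) VALUES (10, 2)"
  ds.update_sql(:a=>10)          # => "UPDATE items SET a = 10, b = 2"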
data/lib/sequel_core/dataset/convenience.rb

@@ -184,7 +184,7 @@ module Sequel
  # Returns the first value of the first record in the dataset.
  # Returns nil if dataset is empty.
  def single_value(opts = nil)
- if r = naked.single_record(opts)
+ if r = single_record((opts||{}).merge(:graph=>false, :naked=>true))
  r.values.first
  end
  end
data/lib/sequel_core/dataset/sql.rb

@@ -70,6 +70,10 @@ module Sequel
  def delete_sql(opts = nil)
  opts = opts ? @opts.merge(opts) : @opts

+ if sql = opts[:sql]
+ return sql
+ end
+
  if opts[:group]
  raise Error::InvalidOperation, "Grouped datasets cannot be deleted from"
  elsif opts[:from].is_a?(Array) && opts[:from].size > 1
@@ -256,41 +260,46 @@ module Sequel
  # dataset.insert_sql(:a => 1, :b => 2) #=>
  # 'INSERT INTO items (a, b) VALUES (1, 2)'
  def insert_sql(*values)
- if values.empty?
- insert_default_values_sql
- else
- values = values[0] if values.size == 1
-
- # if hash or array with keys we need to transform the values
- if @transform && (values.is_a?(Hash) || (values.is_a?(Array) && values.keys))
- values = transform_save(values)
+ if sql = @opts[:sql]
+ return sql
+ end
+
+ from = source_list(@opts[:from])
+ case values.size
+ when 0
+ values = {}
+ when 1
+ vals = values.at(0)
+ if vals.is_one_of?(Hash, Dataset, Array)
+ values = vals
+ elsif vals.respond_to?(:values)
+ values = vals.values
  end
- from = source_list(@opts[:from])
-
- case values
- when Array
- if values.empty?
- insert_default_values_sql
- else
- "INSERT INTO #{from} VALUES #{literal(values)}"
- end
- when Hash
- if values.empty?
- insert_default_values_sql
- else
- fl, vl = [], []
- values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}
- "INSERT INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})"
- end
- when Dataset
- "INSERT INTO #{from} #{literal(values)}"
+ end
+
+ case values
+ when Array
+ if values.empty?
+ insert_default_values_sql
+ else
+ "INSERT INTO #{from} VALUES #{literal(values)}"
+ end
+ when Hash
+ values = @opts[:defaults].merge(values) if @opts[:defaults]
+ values = values.merge(@opts[:overrides]) if @opts[:overrides]
+ values = transform_save(values) if @transform
+ if values.empty?
+ insert_default_values_sql
  else
- if values.respond_to?(:values)
- insert_sql(values.values)
- else
- "INSERT INTO #{from} VALUES (#{literal(values)})"
+ fl, vl = [], []
+ values.each do |k, v|
+ fl << literal(String === k ? k.to_sym : k)
+ vl << literal(v)
  end
+ "INSERT INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})"
  end
+ when Dataset
+ "INSERT INTO #{from} #{literal(values)}"
  end
  end
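A rough illustration of the rewritten dispatch (output shapes follow the code above; exact quoting and the default-values statement are adapter dependent):

  ds = DB[:items]
  ds.insert_sql                 # no args: empty hash, falls through to insert_default_values_sql
  ds.insert_sql(:a => 1)        # => "INSERT INTO items (a) VALUES (1)"
  ds.insert_sql([1, 2, 3])      # => "INSERT INTO items VALUES (1, 2, 3)"
  ds.insert_sql(DB[:old_items]) # subselect form: "INSERT INTO items " plus the literalized dataset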

@@ -698,6 +707,10 @@ module Sequel
  def update_sql(values = {}, opts = nil)
  opts = opts ? @opts.merge(opts) : @opts

+ if sql = opts[:sql]
+ return sql
+ end
+
  if opts[:group]
  raise Error::InvalidOperation, "A grouped dataset cannot be updated"
  elsif (opts[:from].size > 1) or opts[:join]
@@ -706,6 +719,8 @@ module Sequel

  sql = "UPDATE #{source_list(@opts[:from])} SET "
  set = if values.is_a?(Hash)
+ values = opts[:defaults].merge(values) if opts[:defaults]
+ values = values.merge(opts[:overrides]) if opts[:overrides]
  # get values from hash
  values = transform_save(values) if @transform
  values.map do |k, v|
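The new opts[:sql] early return means a dataset built from a raw SQL string now hands that string back from delete_sql, insert_sql and update_sql rather than trying to build a statement around it. A sketch:

  ds = DB["UPDATE items SET touched = 1"]  # raw-SQL dataset, so opts[:sql] is set
  ds.update_sql                            # => "UPDATE items SET touched = 1", unchanged
  ds.update                                # executes the raw statement via execute_dui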
data/lib/sequel_core/exceptions.rb

@@ -27,4 +27,8 @@ module Sequel
  # A transaction block will catch this error and won't pass further up the stack.
  class Rollback < Error ; end
  end
+
+ # Generic error raised by the database adapters, indicating a
+ # problem originating from the database server.
+ class DatabaseError < Error; end
  end
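Since the adapters now funnel driver exceptions through Database#raise_error, application code can rescue a single class regardless of the driver in use. A sketch:

  begin
    DB[:items].insert(:id => 1)
    DB[:items].insert(:id => 1)  # assume this violates a primary key
  rescue Sequel::DatabaseError => e
    puts e.message   # carries the original driver class and message
    puts e.backtrace # the original traceback is preserved via set_backtrace
  end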
data/lib/sequel_core/schema/generator.rb

@@ -48,6 +48,8 @@ module Sequel
  # to whatever the database default is.
  # * :on_delete - Specify the behavior of this column when being deleted.
  # See Schema::SQL#on_delete_clause for options.
+ # * :on_update - Specify the behavior of this column when being updated.
+ # See Schema::SQL#on_delete_clause for options.
  # * :size - The size of the column, generally used with string
  # columns to specify the maximum number of characters the column will hold.
  # * :unique - Mark the column is unique, generally has the same effect as
@@ -62,7 +64,8 @@ module Sequel
  # Adds a named constraint (or unnamed if name is nil) to the DDL,
  # with the given block or args.
  def constraint(name, *args, &block)
- @columns << {:name => name, :type => :check, :check => block || args}
+ @columns << {:name => name, :type => :check, :check => block || args,
+ :constraint_type => :check}
  end

  # Return the DDL created by the generator as a array of two elements,
@@ -85,6 +88,7 @@ module Sequel
  else
  raise(Error, "The seconds argument to foreign_key should be a Hash, Symbol, or nil")
  end
+ return composite_foreign_key(name, opts) if name.is_a?(Array)
  column(name, :integer, opts)
  end

@@ -118,6 +122,7 @@ module Sequel
  # can optionally provide a type argument and/or an options hash argument
  # to change the primary key options. See column for available options.
  def primary_key(name, *args)
+ return composite_primary_key(name, *args) if name.is_a?(Array)
  @primary_key = @db.serial_primary_key_options.merge({:name => name})

  if opts = args.pop
@@ -129,7 +134,7 @@ module Sequel
  end
  @primary_key
  end
-
+
  # The name of the primary key for this table, if it has a primary key.
  def primary_key_name
  @primary_key[:name] if @primary_key
@@ -143,6 +148,21 @@ module Sequel
  # Add a unique index on the given columns to the DDL.
  def unique(columns, opts = {})
  index(columns, opts.merge(:unique => true))
+ @columns << {:type => :check, :constraint_type => :unique,
+ :name => nil, :columns => Array(columns)}.merge(opts)
+ end
+
+ private
+
+ def composite_primary_key(columns, *args)
+ opts = args.pop || {}
+ @columns << {:type => :check, :constraint_type => :primary_key,
+ :name => nil, :columns => columns}.merge(opts)
+ end
+
+ def composite_foreign_key(columns, opts)
+ @columns << {:type => :check, :constraint_type => :foreign_key,
+ :name => nil, :columns => columns }.merge(opts)
  end
  end
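A sketch of what the composite key support enables in create_table (table and column names are illustrative; the emitted SQL comes from constraint_definition_sql in schema/sql.rb further down):

  DB.create_table(:authorships) do
    integer :author_id
    integer :book_id
    primary_key [:author_id, :book_id]               # composite PRIMARY KEY constraint
    foreign_key [:author_id, :book_id], :other_table # composite FOREIGN KEY constraint
    unique [:book_id, :author_id]                    # UNIQUE constraint plus unique index
  end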

@@ -170,12 +190,18 @@ module Sequel
  # See Generator#constraint.
  def add_constraint(name, *args, &block)
  @operations << {:op => :add_constraint, :name => name, :type => :check, \
- :check => block || args}
+ :constraint_type => :check, :check => block || args}
+ end
+
+ def add_unique_constraint(columns, opts = {})
+ @operations << {:op => :add_constraint, :type => :check,
+ :constraint_type => :unique, :columns => Array(columns)}.merge(opts)
  end

  # Add a foreign key with the given name and referencing the given table
  # to the DDL for the table. See Generator#column for the available options.
  def add_foreign_key(name, table, opts = {})
+ return add_composite_foreign_key(name, table, opts) if name.is_a?(Array)
  add_column(name, :integer, {:table=>table}.merge(opts))
  end

@@ -194,6 +220,7 @@ module Sequel
  # Add a primary key to the DDL for the table. See Generator#column
  # for the available options.
  def add_primary_key(name, opts = {})
+ return add_composite_primary_key(name, opts) if name.is_a?(Array)
  opts = @db.serial_primary_key_options.merge(opts)
  add_column(name, opts.delete(:type), opts)
  end
@@ -233,6 +260,19 @@ module Sequel
  def set_column_type(name, type)
  @operations << {:op => :set_column_type, :name => name, :type => type}
  end
+
+ private
+
+ def add_composite_primary_key(columns, opts)
+ @operations << {:op => :add_constraint, :type => :check,
+ :constraint_type => :primary_key, :columns => columns}.merge(opts)
+ end
+
+ def add_composite_foreign_key(columns, table, opts)
+ @operations << {:op => :add_constraint, :type => :check,
+ :constraint_type => :foreign_key, :columns => columns,
+ :table => table}.merge(opts)
+ end
  end
  end
  end
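The alter-table generator gains matching entry points; roughly (names illustrative):

  DB.alter_table(:authorships) do
    add_primary_key [:author_id, :book_id]              # routed to add_composite_primary_key
    add_foreign_key [:author_id, :book_id], :other_table
    add_unique_constraint [:book_id, :author_id]
  end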
data/lib/sequel_core/schema/sql.rb

@@ -22,28 +22,29 @@ module Sequel
  def alter_table_sql(table, op)
  quoted_table = quote_identifier(table)
  quoted_name = quote_identifier(op[:name]) if op[:name]
- case op[:op]
+ alter_table_op = case op[:op]
  when :add_column
- "ALTER TABLE #{quoted_table} ADD COLUMN #{column_definition_sql(op)}"
+ "ADD COLUMN #{column_definition_sql(op)}"
  when :drop_column
- "ALTER TABLE #{quoted_table} DROP COLUMN #{quoted_name}"
+ "DROP COLUMN #{quoted_name}"
  when :rename_column
- "ALTER TABLE #{quoted_table} RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}"
+ "RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}"
  when :set_column_type
- "ALTER TABLE #{quoted_table} ALTER COLUMN #{quoted_name} TYPE #{op[:type]}"
+ "ALTER COLUMN #{quoted_name} TYPE #{op[:type]}"
  when :set_column_default
- "ALTER TABLE #{quoted_table} ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}"
+ "ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}"
  when :add_index
- index_definition_sql(table, op)
+ return index_definition_sql(table, op)
  when :drop_index
- "DROP INDEX #{default_index_name(table, op[:columns])}"
+ return drop_index_sql(table, op)
  when :add_constraint
- "ALTER TABLE #{quoted_table} ADD #{constraint_definition_sql(op)}"
+ "ADD #{constraint_definition_sql(op)}"
  when :drop_constraint
- "ALTER TABLE #{quoted_table} DROP CONSTRAINT #{quoted_name}"
+ "DROP CONSTRAINT #{quoted_name}"
  else
  raise Error, "Unsupported ALTER TABLE operation"
  end
+ "ALTER TABLE #{quoted_table} #{alter_table_op}"
  end

  # Array of SQL DDL modification statements for the given table,
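The refactor does not change the emitted statements, it only builds the shared ALTER TABLE prefix in one place; for instance (a sketch, assuming identifier quoting is disabled):

  alter_table_sql(:items, :op => :drop_column, :name => :price)
  # => "ALTER TABLE items DROP COLUMN price"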
@@ -61,22 +62,14 @@ module Sequel
  # SQL DDL fragment containing the column creation SQL for the given column.
  def column_definition_sql(column)
  return constraint_definition_sql(column) if column[:type] == :check
- sql = "#{quote_identifier(column[:name])} #{type_literal(TYPES[column[:type]])}"
- column[:size] ||= 255 if column[:type] == :varchar
- elements = column[:size] || column[:elements]
- sql << literal(Array(elements)) if elements
- sql << UNSIGNED if column[:unsigned]
+ sql = "#{quote_identifier(column[:name])} #{type_literal(column)}"
  sql << UNIQUE if column[:unique]
  sql << NOT_NULL if column[:null] == false
  sql << NULL if column[:null] == true
  sql << " DEFAULT #{literal(column[:default])}" if column.include?(:default)
  sql << PRIMARY_KEY if column[:primary_key]
  sql << " #{auto_increment_sql}" if column[:auto_increment]
- if column[:table]
- sql << " REFERENCES #{quote_identifier(column[:table])}"
- sql << "(#{quote_identifier(column[:key])})" if column[:key]
- sql << " ON DELETE #{on_delete_clause(column[:on_delete])}" if column[:on_delete]
- end
+ sql << column_references_sql(column) if column[:table]
  sql
  end

@@ -85,11 +78,30 @@ module Sequel
  def column_list_sql(columns)
  columns.map{|c| column_definition_sql(c)}.join(COMMA_SEPARATOR)
  end
+
+ # SQL DDL fragment for column foreign key references
+ def column_references_sql(column)
+ sql = " REFERENCES #{quote_identifier(column[:table])}"
+ sql << "(#{Array(column[:key]).map{|x| quote_identifier(x)}.join(COMMA_SEPARATOR)})" if column[:key]
+ sql << " ON DELETE #{on_delete_clause(column[:on_delete])}" if column[:on_delete]
+ sql << " ON UPDATE #{on_delete_clause(column[:on_update])}" if column[:on_update]
+ sql
+ end

  # SQL DDL fragment specifying a constraint on a table.
  def constraint_definition_sql(constraint)
  sql = constraint[:name] ? "CONSTRAINT #{quote_identifier(constraint[:name])} " : ""
- sql << "CHECK #{filter_expr(constraint[:check])}"
+ case constraint[:constraint_type]
+ when :primary_key
+ sql << "PRIMARY KEY #{literal(constraint[:columns])}"
+ when :foreign_key
+ sql << "FOREIGN KEY #{literal(constraint[:columns])}"
+ sql << column_references_sql(constraint)
+ when :unique
+ sql << "UNIQUE #{literal(constraint[:columns])}"
+ else
+ sql << "CHECK #{filter_expr(constraint[:check])}"
+ end
  sql
  end
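So a composite key declared in the generator ends up as a table-level constraint; roughly (again assuming identifier quoting is disabled, with illustrative names):

  constraint_definition_sql(:constraint_type => :primary_key, :columns => [:author_id, :book_id])
  # => "PRIMARY KEY (author_id, book_id)"
  constraint_definition_sql(:constraint_type => :foreign_key, :columns => [:author_id],
                            :table => :authors, :on_delete => :cascade)
  # => "FOREIGN KEY (author_id) REFERENCES authors ON DELETE CASCADE"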

@@ -108,6 +120,11 @@ module Sequel
  "#{table_name}_#{columns.join(UNDERSCORE)}_index"
  end

+ # The SQL to drop an index for the table.
+ def drop_index_sql(table, op)
+ "DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}"
+ end
+
  # SQL DDL statement to drop the table with the given name.
  def drop_table_sql(name)
  "DROP TABLE #{quote_identifier(name)}"
@@ -127,7 +144,7 @@ module Sequel
  elsif index[:where]
  raise Error, "Partial indexes are not supported for this database"
  else
- "CREATE #{'UNIQUE ' if index[:unique]}INDEX #{index_name} ON #{quote_identifier(table_name)} #{literal(index[:columns])}"
+ "CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_identifier(table_name)} #{literal(index[:columns])}"
  end
  end

@@ -215,10 +232,6 @@ module Sequel
  @schema_utility_dataset ||= dataset
  end

- # SQL fragment specifying the type of a given column.
- def type_literal(t)
- t.is_a?(Symbol) ? t.to_s : literal(t)
- end

  private

@@ -320,6 +333,19 @@ module Sequel
  end
  schema
  end
+
+ # SQL fragment specifying the type of a given column.
+ def type_literal(column)
+ column[:size] ||= 255 if column[:type] == :varchar
+ elements = column[:size] || column[:elements]
+ "#{type_literal_base(column)}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}"
+ end
+
+ # SQL fragment specifying the base type of a given column,
+ # without the size or elements.
+ def type_literal_base(column)
+ TYPES[column[:type]]
+ end
  end
  end
  end
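For context, type_literal now receives the whole column hash so adapters can override just type_literal_base; a sketch of the resulting fragments, assuming a TYPES map that passes these type names through unchanged:

  type_literal(:type => :varchar)                     # => "varchar(255)"
  type_literal(:type => :enum, :elements => %w[a b])  # => "enum('a', 'b')"
  type_literal(:type => :integer, :unsigned => true)  # => "integer UNSIGNED"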