sequel_core 1.5.1 → 2.0.0
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- data/CHANGELOG +116 -0
- data/COPYING +19 -19
- data/README +83 -32
- data/Rakefile +9 -20
- data/bin/sequel +43 -112
- data/doc/cheat_sheet.rdoc +225 -0
- data/doc/dataset_filtering.rdoc +257 -0
- data/lib/sequel_core/adapters/adapter_skeleton.rb +4 -2
- data/lib/sequel_core/adapters/ado.rb +3 -1
- data/lib/sequel_core/adapters/db2.rb +4 -2
- data/lib/sequel_core/adapters/dbi.rb +127 -113
- data/lib/sequel_core/adapters/informix.rb +4 -2
- data/lib/sequel_core/adapters/jdbc.rb +5 -3
- data/lib/sequel_core/adapters/mysql.rb +112 -46
- data/lib/sequel_core/adapters/odbc.rb +5 -7
- data/lib/sequel_core/adapters/odbc_mssql.rb +12 -3
- data/lib/sequel_core/adapters/openbase.rb +3 -1
- data/lib/sequel_core/adapters/oracle.rb +11 -9
- data/lib/sequel_core/adapters/postgres.rb +261 -262
- data/lib/sequel_core/adapters/sqlite.rb +72 -22
- data/lib/sequel_core/connection_pool.rb +140 -73
- data/lib/sequel_core/core_ext.rb +201 -66
- data/lib/sequel_core/core_sql.rb +123 -153
- data/lib/sequel_core/database/schema.rb +156 -0
- data/lib/sequel_core/database.rb +321 -338
- data/lib/sequel_core/dataset/callback.rb +11 -12
- data/lib/sequel_core/dataset/convenience.rb +213 -240
- data/lib/sequel_core/dataset/pagination.rb +58 -43
- data/lib/sequel_core/dataset/parse_tree_sequelizer.rb +331 -0
- data/lib/sequel_core/dataset/query.rb +41 -0
- data/lib/sequel_core/dataset/schema.rb +15 -0
- data/lib/sequel_core/dataset/sequelizer.rb +41 -373
- data/lib/sequel_core/dataset/sql.rb +741 -632
- data/lib/sequel_core/dataset.rb +183 -168
- data/lib/sequel_core/deprecated.rb +1 -169
- data/lib/sequel_core/exceptions.rb +24 -19
- data/lib/sequel_core/migration.rb +44 -52
- data/lib/sequel_core/object_graph.rb +43 -42
- data/lib/sequel_core/pretty_table.rb +71 -76
- data/lib/sequel_core/schema/generator.rb +163 -105
- data/lib/sequel_core/schema/sql.rb +250 -93
- data/lib/sequel_core/schema.rb +2 -8
- data/lib/sequel_core/sql.rb +394 -0
- data/lib/sequel_core/worker.rb +37 -27
- data/lib/sequel_core.rb +99 -45
- data/spec/adapters/informix_spec.rb +0 -1
- data/spec/adapters/mysql_spec.rb +177 -124
- data/spec/adapters/oracle_spec.rb +0 -1
- data/spec/adapters/postgres_spec.rb +98 -58
- data/spec/adapters/sqlite_spec.rb +45 -4
- data/spec/blockless_filters_spec.rb +269 -0
- data/spec/connection_pool_spec.rb +21 -18
- data/spec/core_ext_spec.rb +169 -19
- data/spec/core_sql_spec.rb +56 -49
- data/spec/database_spec.rb +78 -17
- data/spec/dataset_spec.rb +300 -428
- data/spec/migration_spec.rb +1 -1
- data/spec/object_graph_spec.rb +5 -11
- data/spec/rcov.opts +1 -1
- data/spec/schema_generator_spec.rb +16 -4
- data/spec/schema_spec.rb +89 -10
- data/spec/sequelizer_spec.rb +56 -56
- data/spec/spec.opts +0 -5
- data/spec/spec_config.rb +7 -0
- data/spec/spec_config.rb.example +5 -5
- data/spec/spec_helper.rb +6 -0
- data/spec/worker_spec.rb +1 -1
- metadata +78 -63
@@ -1,67 +1,70 @@
 module Sequel
   module Schema
     module SQL
-      RESTRICT = 'RESTRICT'.freeze
-      CASCADE = 'CASCADE'.freeze
-      NO_ACTION = 'NO ACTION'.freeze
-      SET_NULL = 'SET NULL'.freeze
-      SET_DEFAULT = 'SET DEFAULT'.freeze
-
-      def on_delete_clause(action)
-        case action
-        when :restrict
-          RESTRICT
-        when :cascade
-          CASCADE
-        when :set_null
-          SET_NULL
-        when :set_default
-          SET_DEFAULT
-        else
-          NO_ACTION
-        end
-      end
-
       AUTOINCREMENT = 'AUTOINCREMENT'.freeze
-
-      def auto_increment_sql
-        AUTOINCREMENT
-      end
-
+      CASCADE = 'CASCADE'.freeze
       COMMA_SEPARATOR = ', '.freeze
-
+      NO_ACTION = 'NO ACTION'.freeze
       NOT_NULL = ' NOT NULL'.freeze
       NULL = ' NULL'.freeze
-      UNSIGNED = ' UNSIGNED'.freeze
       PRIMARY_KEY = ' PRIMARY KEY'.freeze
-
+      RESTRICT = 'RESTRICT'.freeze
+      SET_DEFAULT = 'SET DEFAULT'.freeze
+      SET_NULL = 'SET NULL'.freeze
       TYPES = Hash.new {|h, k| k}
       TYPES[:double] = 'double precision'
-
-
-
+      UNDERSCORE = '_'.freeze
+      UNIQUE = ' UNIQUE'.freeze
+      UNSIGNED = ' UNSIGNED'.freeze
+
+      # The SQL to execute to modify the DDL for the given table name. op
+      # should be one of the operations returned by the AlterTableGenerator.
+      def alter_table_sql(table, op)
+        quoted_table = quote_identifier(table)
+        quoted_name = quote_identifier(op[:name]) if op[:name]
+        case op[:op]
+        when :add_column
+          "ALTER TABLE #{quoted_table} ADD COLUMN #{column_definition_sql(op)}"
+        when :drop_column
+          "ALTER TABLE #{quoted_table} DROP COLUMN #{quoted_name}"
+        when :rename_column
+          "ALTER TABLE #{quoted_table} RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}"
+        when :set_column_type
+          "ALTER TABLE #{quoted_table} ALTER COLUMN #{quoted_name} TYPE #{op[:type]}"
+        when :set_column_default
+          "ALTER TABLE #{quoted_table} ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}"
+        when :add_index
+          index_definition_sql(table, op)
+        when :drop_index
+          "DROP INDEX #{default_index_name(table, op[:columns])}"
+        when :add_constraint
+          "ALTER TABLE #{quoted_table} ADD #{constraint_definition_sql(op)}"
+        when :drop_constraint
+          "ALTER TABLE #{quoted_table} DROP CONSTRAINT #{quoted_name}"
+        else
+          raise Error, "Unsupported ALTER TABLE operation"
+        end
       end
-
-
-
+
+      # Array of SQL DDL modification statements for the given table,
+      # corresponding to the DDL changes specified by the operations.
+      def alter_table_sql_list(table, operations)
+        operations.map{|op| alter_table_sql(table, op)}
       end
 
-
-
+      # The SQL string specify the autoincrement property, generally used by
+      # primary keys.
+      def auto_increment_sql
+        AUTOINCREMENT
       end
 
-
-        schema_utility_dataset.send(:expression_list, *args, &block)
-      end
-
+      # SQL DDL fragment containing the column creation SQL for the given column.
       def column_definition_sql(column)
-        if column[:type] == :check
-
-        end
-        sql = "#{literal(column[:name].to_sym)} #{type_literal(TYPES[column[:type]])}"
+        return constraint_definition_sql(column) if column[:type] == :check
+        sql = "#{quote_identifier(column[:name])} #{type_literal(TYPES[column[:type]])}"
         column[:size] ||= 255 if column[:type] == :varchar
         elements = column[:size] || column[:elements]
-        sql <<
+        sql << literal(Array(elements)) if elements
         sql << UNSIGNED if column[:unsigned]
         sql << UNIQUE if column[:unique]
         sql << NOT_NULL if column[:null] == false
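The new alter_table_sql above builds one DDL statement per operation hash produced by the AlterTableGenerator. As a rough sketch only (the DB handle, the items table, and the generator method names shown here are assumptions, and the exact output depends on the adapter's quote_identifier and type handling), the user-facing alter_table block maps to SQL roughly like this:

    # Illustrative sketch, not part of the diff; assumes an adapter that
    # leaves identifiers unquoted.
    DB.alter_table(:items) do
      add_column :price, :integer     # -> "ALTER TABLE items ADD COLUMN price integer"
      rename_column :qty, :quantity   # -> "ALTER TABLE items RENAME COLUMN qty TO quantity"
      drop_column :legacy_flag        # -> "ALTER TABLE items DROP COLUMN legacy_flag"
    end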
@@ -70,92 +73,246 @@ module Sequel
         sql << PRIMARY_KEY if column[:primary_key]
         sql << " #{auto_increment_sql}" if column[:auto_increment]
         if column[:table]
-          sql << " REFERENCES #{column[:table]}"
-          sql << "(#{column[:key]})" if column[:key]
+          sql << " REFERENCES #{quote_identifier(column[:table])}"
+          sql << "(#{quote_identifier(column[:key])})" if column[:key]
           sql << " ON DELETE #{on_delete_clause(column[:on_delete])}" if column[:on_delete]
         end
         sql
       end
 
-
-
-
-        sql << "CHECK #{expression_list(column[:check], true)}"
-        sql
-      end
-
+      # SQL DDL fragment containing the column creation
+      # SQL for all given columns, used instead a CREATE TABLE block.
       def column_list_sql(columns)
-        columns.map
+        columns.map{|c| column_definition_sql(c)}.join(COMMA_SEPARATOR)
       end
 
-
+      # SQL DDL fragment specifying a constraint on a table.
+      def constraint_definition_sql(constraint)
+        sql = constraint[:name] ? "CONSTRAINT #{quote_identifier(constraint[:name])} " : ""
+        sql << "CHECK #{filter_expr(constraint[:check])}"
+        sql
+      end
 
+      # Array of SQL DDL statements, the first for creating a table with the given
+      # name and column specifications, and the others for specifying indexes on
+      # the table.
+      def create_table_sql_list(name, columns, indexes = nil)
+        sql = ["CREATE TABLE #{quote_identifier(name)} (#{column_list_sql(columns)})"]
+        sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?
+        sql
+      end
+
+      # Default index name for the table and columns, may be too long
+      # for certain databases.
       def default_index_name(table_name, columns)
         "#{table_name}_#{columns.join(UNDERSCORE)}_index"
       end
 
+      # SQL DDL statement to drop the table with the given name.
+      def drop_table_sql(name)
+        "DROP TABLE #{quote_identifier(name)}"
+      end
+
+      # Proxy the filter_expr call to the dataset, used for creating constraints.
+      def filter_expr(*args, &block)
+        schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))
+      end
+
+      # SQL DDL statement for creating an index for the table with the given name
+      # and index specifications.
       def index_definition_sql(table_name, index)
         index_name = index[:name] || default_index_name(table_name, index[:columns])
         if index[:type]
           raise Error, "Index types are not supported for this database"
         elsif index[:where]
           raise Error, "Partial indexes are not supported for this database"
-        elsif index[:unique]
-          "CREATE UNIQUE INDEX #{index_name} ON #{table_name} (#{literal(index[:columns])})"
         else
-          "CREATE INDEX #{index_name} ON #{table_name}
+          "CREATE #{'UNIQUE ' if index[:unique]}INDEX #{index_name} ON #{quote_identifier(table_name)} #{literal(index[:columns])}"
         end
       end
 
+      # Array of SQL DDL statements, one for each index specification,
+      # for the given table.
       def index_list_sql_list(table_name, indexes)
-        indexes.map
+        indexes.map{|i| index_definition_sql(table_name, i)}
       end
 
-
-
-
-
+      # Proxy the literal call to the dataset, used for default values.
+      def literal(v)
+        schema_utility_dataset.literal(v)
+      end
+
+      # SQL DDL ON DELETE fragment to use, based on the given action.
+      # The following actions are recognized:
+      #
+      # * :cascade - Delete rows referencing this row.
+      # * :no_action (default) - Raise an error if other rows reference this
+      #   row, allow deferring of the integrity check.
+      # * :restrict - Raise an error if other rows reference this row,
+      #   but do not allow deferring the integrity check.
+      # * :set_default - Set columns referencing this row to their default value.
+      # * :set_null - Set columns referencing this row to NULL.
+      def on_delete_clause(action)
+        case action
+        when :restrict
+          RESTRICT
+        when :cascade
+          CASCADE
+        when :set_null
+          SET_NULL
+        when :set_default
+          SET_DEFAULT
+        else
+          NO_ACTION
         end
-        sql
       end
 
-
-
+      # Proxy the quote_identifier method to the dataset, used for quoting tables and columns.
+      def quote_identifier(v)
+        schema_utility_dataset.quote_identifier(v)
       end
 
+      # SQL DDL statement for renaming a table.
       def rename_table_sql(name, new_name)
-        "ALTER TABLE #{name} RENAME TO #{new_name}"
+        "ALTER TABLE #{quote_identifier(name)} RENAME TO #{quote_identifier(new_name)}"
+      end
+
+      # Parse the schema from the database using the SQL standard INFORMATION_SCHEMA.
+      # If the table_name is not given, returns the schema for all tables as a hash.
+      # If the table_name is given, returns the schema for a single table as an
+      # array with all members being arrays of length 2. Available options are:
+      #
+      # * :reload - Get fresh information from the database, instead of using
+      #   cached information. If table_name is blank, :reload should be used
+      #   unless you are sure that schema has not been called before with a
+      #   table_name, otherwise you may only getting the schemas for tables
+      #   that have been requested explicitly.
+      def schema(table_name = nil, opts={})
+        if opts[:reload] && @schemas
+          if table_name
+            @schemas.delete(table_name)
+          else
+            @schemas = nil
+          end
+        end
+
+        if table_name
+          return @schemas[table_name] if @schemas && @schemas[table_name]
+        else
+          return @schemas if @schemas
+        end
+
+        if table_name
+          @schemas ||= {}
+          @schemas[table_name] ||= schema_parse_table(table_name, opts)
+        else
+          @schemas = schema_parse_tables(opts)
+        end
       end
 
-
-
+      # The dataset to use for proxying certain schema methods.
+      def schema_utility_dataset
+        @schema_utility_dataset ||= dataset
       end
 
-
-
-
-
-
-
-
-
-
-
-
-
-        when
-
-        when
-
-        when
-
-        when
-
+      # SQL fragment specifying the type of a given column.
+      def type_literal(t)
+        t.is_a?(Symbol) ? t.to_s : literal(t)
+      end
+
+      private
+
+      # Match the database's column type to a ruby type via a
+      # regular expression. The following ruby types are supported:
+      # integer, string, date, datetime, boolean, and float.
+      def schema_column_type(db_type)
+        case db_type
+        when /\A(int(eger)?|bigint|smallint)\z/
+          :integer
+        when /\A(character( varying)?|varchar|text)\z/
+          :string
+        when /\A(date)\z/
+          :date
+        when /\A(datetime|time|timestamp( with(out)? time zone)?)\z/
+          :datetime
+        when /\A(boolean|tinyint)\z/
+          :boolean
+        when /\A(real|float|double( precision)?)\z/
+          :float
+        end
+      end
+
+      # The final dataset used by the schema parser, after all
+      # options have been applied.
+      def schema_ds(table_name, opts)
+        schema_ds_dataset.from(*schema_ds_from(table_name, opts)) \
+          .select(*schema_ds_select(table_name, opts)) \
+          .join(*schema_ds_join(table_name, opts)) \
+          .filter(*schema_ds_filter(table_name, opts))
+      end
+
+      # The blank dataset used by the schema parser.
+      def schema_ds_dataset
+        schema_utility_dataset
+      end
+
+      # Argument array for the schema dataset's filter method.
+      def schema_ds_filter(table_name, opts)
+        if table_name
+          [{:c__table_name=>table_name.to_s}]
         else
-
+          [{:t__table_type=>'BASE TABLE'}]
         end
       end
+
+      # Argument array for the schema dataset's from method.
+      def schema_ds_from(table_name, opts)
+        [:information_schema__tables___t]
+      end
+
+      # Argument array for the schema dataset's join method.
+      def schema_ds_join(table_name, opts)
+        [:information_schema__columns, {:table_catalog=>:table_catalog,
+          :table_schema => :table_schema, :table_name => :table_name} , :c]
+      end
+
+      # Argument array for the schema dataset's select method.
+      def schema_ds_select(table_name, opts)
+        cols = [:column_name___column, :data_type___db_type, :character_maximum_length___max_chars, \
+          :numeric_precision, :column_default___default, :is_nullable___allow_null]
+        cols << :c__table_name unless table_name
+        cols
+      end
+
+      # Parse the schema for a given table.
+      def schema_parse_table(table_name, opts)
+        schema_parse_rows(schema_ds(table_name, opts))
+      end
+
+      # Parse the schema all tables in the database.
+      def schema_parse_tables(opts)
+        schemas = {}
+        schema_ds(nil, opts).each do |row|
+          (schemas[row.delete(:table_name).to_sym] ||= []) << row
+        end
+        schemas.each do |table, rows|
+          schemas[table] = schema_parse_rows(rows)
+        end
+        schemas
+      end
+
+      # Parse the output of the information schema columns into
+      # the hash used by Sequel.
+      def schema_parse_rows(rows)
+        schema = []
+        rows.each do |row|
+          row[:allow_null] = row[:allow_null] == 'YES' ? true : false
+          row[:default] = nil if row[:default].blank?
+          row[:type] = schema_column_type(row[:db_type])
+          schema << [row.delete(:column).to_sym, row]
+        end
+        schema
+      end
     end
   end
 end
-
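For orientation, each column hash built by schema_parse_rows carries the keys selected above (:db_type, :max_chars, :numeric_precision, :default, :allow_null) plus the derived :type. A call to the new Database#schema against a hypothetical items table would therefore return something shaped roughly like the following (the table and all values are made up for illustration; real output depends on the adapter and what INFORMATION_SCHEMA reports):

    # Hypothetical output shape only.
    DB.schema(:items)
    # => [[:id,   {:type=>:integer, :db_type=>"integer", :allow_null=>false,
    #              :default=>nil, :max_chars=>nil, :numeric_precision=>32}],
    #     [:name, {:type=>:string, :db_type=>"varchar", :allow_null=>true,
    #              :default=>nil, :max_chars=>255, :numeric_precision=>nil}]]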
data/lib/sequel_core/schema.rb CHANGED