sequel 2.11.0 → 2.12.0
- data/CHANGELOG +168 -0
- data/README.rdoc +77 -95
- data/Rakefile +100 -80
- data/bin/sequel +2 -1
- data/doc/advanced_associations.rdoc +23 -32
- data/doc/cheat_sheet.rdoc +23 -40
- data/doc/dataset_filtering.rdoc +6 -6
- data/doc/prepared_statements.rdoc +22 -22
- data/doc/release_notes/2.12.0.txt +534 -0
- data/doc/schema.rdoc +3 -1
- data/doc/sharding.rdoc +8 -8
- data/doc/virtual_rows.rdoc +65 -0
- data/lib/sequel.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/ado.rb +3 -3
- data/lib/{sequel_core → sequel}/adapters/db2.rb +0 -0
- data/lib/{sequel_core → sequel}/adapters/dbi.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/do.rb +9 -5
- data/lib/{sequel_core → sequel}/adapters/do/mysql.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/do/postgres.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/do/sqlite.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/firebird.rb +84 -80
- data/lib/{sequel_core → sequel}/adapters/informix.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/jdbc.rb +21 -14
- data/lib/{sequel_core → sequel}/adapters/jdbc/h2.rb +14 -13
- data/lib/{sequel_core → sequel}/adapters/jdbc/mysql.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/jdbc/oracle.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/jdbc/postgresql.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/jdbc/sqlite.rb +1 -1
- data/lib/{sequel_core → sequel}/adapters/mysql.rb +60 -39
- data/lib/{sequel_core → sequel}/adapters/odbc.rb +8 -4
- data/lib/{sequel_core → sequel}/adapters/openbase.rb +0 -0
- data/lib/{sequel_core → sequel}/adapters/oracle.rb +38 -7
- data/lib/{sequel_core → sequel}/adapters/postgres.rb +24 -24
- data/lib/{sequel_core → sequel}/adapters/shared/mssql.rb +5 -5
- data/lib/{sequel_core → sequel}/adapters/shared/mysql.rb +126 -71
- data/lib/{sequel_core → sequel}/adapters/shared/oracle.rb +7 -10
- data/lib/{sequel_core → sequel}/adapters/shared/postgres.rb +159 -125
- data/lib/{sequel_core → sequel}/adapters/shared/progress.rb +1 -2
- data/lib/{sequel_core → sequel}/adapters/shared/sqlite.rb +72 -67
- data/lib/{sequel_core → sequel}/adapters/sqlite.rb +11 -7
- data/lib/{sequel_core → sequel}/adapters/utils/date_format.rb +0 -0
- data/lib/{sequel_core → sequel}/adapters/utils/stored_procedures.rb +0 -0
- data/lib/{sequel_core → sequel}/adapters/utils/unsupported.rb +19 -0
- data/lib/{sequel_core → sequel}/connection_pool.rb +7 -5
- data/lib/sequel/core.rb +221 -0
- data/lib/{sequel_core → sequel}/core_sql.rb +91 -49
- data/lib/{sequel_core → sequel}/database.rb +264 -149
- data/lib/{sequel_core/schema/generator.rb → sequel/database/schema_generator.rb} +6 -2
- data/lib/{sequel_core/database/schema.rb → sequel/database/schema_methods.rb} +12 -12
- data/lib/sequel/database/schema_sql.rb +224 -0
- data/lib/{sequel_core → sequel}/dataset.rb +78 -236
- data/lib/{sequel_core → sequel}/dataset/convenience.rb +99 -61
- data/lib/{sequel_core/object_graph.rb → sequel/dataset/graph.rb} +16 -14
- data/lib/{sequel_core → sequel}/dataset/prepared_statements.rb +1 -1
- data/lib/{sequel_core → sequel}/dataset/sql.rb +150 -99
- data/lib/sequel/deprecated.rb +593 -0
- data/lib/sequel/deprecated_migration.rb +91 -0
- data/lib/sequel/exceptions.rb +48 -0
- data/lib/sequel/extensions/blank.rb +42 -0
- data/lib/{sequel_model → sequel/extensions}/inflector.rb +8 -1
- data/lib/{sequel_core → sequel/extensions}/migration.rb +1 -1
- data/lib/{sequel_core/dataset → sequel/extensions}/pagination.rb +0 -0
- data/lib/{sequel_core → sequel/extensions}/pretty_table.rb +7 -0
- data/lib/{sequel_core/dataset → sequel/extensions}/query.rb +7 -0
- data/lib/sequel/extensions/string_date_time.rb +47 -0
- data/lib/sequel/metaprogramming.rb +43 -0
- data/lib/sequel/model.rb +110 -0
- data/lib/sequel/model/associations.rb +1300 -0
- data/lib/sequel/model/base.rb +937 -0
- data/lib/sequel/model/deprecated.rb +204 -0
- data/lib/sequel/model/deprecated_hooks.rb +103 -0
- data/lib/sequel/model/deprecated_inflector.rb +335 -0
- data/lib/sequel/model/deprecated_validations.rb +388 -0
- data/lib/sequel/model/errors.rb +39 -0
- data/lib/{sequel_model → sequel/model}/exceptions.rb +4 -4
- data/lib/sequel/model/inflections.rb +208 -0
- data/lib/sequel/model/plugins.rb +76 -0
- data/lib/sequel/plugins/caching.rb +122 -0
- data/lib/sequel/plugins/hook_class_methods.rb +122 -0
- data/lib/sequel/plugins/schema.rb +53 -0
- data/lib/sequel/plugins/serialization.rb +117 -0
- data/lib/sequel/plugins/single_table_inheritance.rb +63 -0
- data/lib/sequel/plugins/validation_class_methods.rb +384 -0
- data/lib/sequel/plugins/validation_helpers.rb +150 -0
- data/lib/{sequel_core → sequel}/sql.rb +125 -190
- data/lib/{sequel_core → sequel}/version.rb +2 -1
- data/lib/sequel_core.rb +1 -172
- data/lib/sequel_model.rb +1 -91
- data/spec/adapters/firebird_spec.rb +5 -5
- data/spec/adapters/informix_spec.rb +1 -1
- data/spec/adapters/mysql_spec.rb +128 -42
- data/spec/adapters/oracle_spec.rb +47 -19
- data/spec/adapters/postgres_spec.rb +64 -52
- data/spec/adapters/spec_helper.rb +1 -1
- data/spec/adapters/sqlite_spec.rb +12 -17
- data/spec/{sequel_core → core}/connection_pool_spec.rb +10 -10
- data/spec/{sequel_core → core}/core_ext_spec.rb +19 -19
- data/spec/{sequel_core → core}/core_sql_spec.rb +68 -71
- data/spec/{sequel_core → core}/database_spec.rb +135 -99
- data/spec/{sequel_core → core}/dataset_spec.rb +398 -242
- data/spec/{sequel_core → core}/expression_filters_spec.rb +13 -13
- data/spec/core/migration_spec.rb +263 -0
- data/spec/{sequel_core → core}/object_graph_spec.rb +10 -10
- data/spec/{sequel_core → core}/pretty_table_spec.rb +2 -2
- data/spec/{sequel_core → core}/schema_generator_spec.rb +0 -0
- data/spec/{sequel_core → core}/schema_spec.rb +8 -10
- data/spec/{sequel_core → core}/spec_helper.rb +29 -2
- data/spec/{sequel_core → core}/version_spec.rb +0 -0
- data/spec/extensions/blank_spec.rb +67 -0
- data/spec/extensions/caching_spec.rb +201 -0
- data/spec/{sequel_model/hooks_spec.rb → extensions/hook_class_methods_spec.rb} +8 -23
- data/spec/{sequel_model → extensions}/inflector_spec.rb +3 -0
- data/spec/{sequel_core → extensions}/migration_spec.rb +4 -4
- data/spec/extensions/pagination_spec.rb +99 -0
- data/spec/extensions/pretty_table_spec.rb +91 -0
- data/spec/extensions/query_spec.rb +85 -0
- data/spec/{sequel_model → extensions}/schema_spec.rb +22 -1
- data/spec/extensions/serialization_spec.rb +109 -0
- data/spec/extensions/single_table_inheritance_spec.rb +53 -0
- data/spec/{sequel_model → extensions}/spec_helper.rb +13 -4
- data/spec/extensions/string_date_time_spec.rb +93 -0
- data/spec/{sequel_model/validations_spec.rb → extensions/validation_class_methods_spec.rb} +15 -103
- data/spec/extensions/validation_helpers_spec.rb +291 -0
- data/spec/integration/dataset_test.rb +31 -0
- data/spec/integration/eager_loader_test.rb +17 -30
- data/spec/integration/schema_test.rb +8 -5
- data/spec/integration/spec_helper.rb +17 -0
- data/spec/integration/transaction_test.rb +68 -0
- data/spec/{sequel_model → model}/association_reflection_spec.rb +0 -0
- data/spec/{sequel_model → model}/associations_spec.rb +23 -10
- data/spec/{sequel_model → model}/base_spec.rb +29 -20
- data/spec/{sequel_model → model}/caching_spec.rb +16 -14
- data/spec/{sequel_model → model}/dataset_methods_spec.rb +0 -0
- data/spec/{sequel_model → model}/eager_loading_spec.rb +8 -8
- data/spec/model/hooks_spec.rb +472 -0
- data/spec/model/inflector_spec.rb +126 -0
- data/spec/{sequel_model → model}/model_spec.rb +25 -20
- data/spec/model/plugins_spec.rb +142 -0
- data/spec/{sequel_model → model}/record_spec.rb +121 -62
- data/spec/model/schema_spec.rb +92 -0
- data/spec/model/spec_helper.rb +124 -0
- data/spec/model/validations_spec.rb +1080 -0
- metadata +136 -107
- data/lib/sequel_core/core_ext.rb +0 -217
- data/lib/sequel_core/dataset/callback.rb +0 -13
- data/lib/sequel_core/dataset/schema.rb +0 -15
- data/lib/sequel_core/deprecated.rb +0 -26
- data/lib/sequel_core/exceptions.rb +0 -44
- data/lib/sequel_core/schema.rb +0 -2
- data/lib/sequel_core/schema/sql.rb +0 -325
- data/lib/sequel_model/association_reflection.rb +0 -267
- data/lib/sequel_model/associations.rb +0 -499
- data/lib/sequel_model/base.rb +0 -539
- data/lib/sequel_model/caching.rb +0 -82
- data/lib/sequel_model/dataset_methods.rb +0 -26
- data/lib/sequel_model/eager_loading.rb +0 -370
- data/lib/sequel_model/hooks.rb +0 -101
- data/lib/sequel_model/plugins.rb +0 -62
- data/lib/sequel_model/record.rb +0 -568
- data/lib/sequel_model/schema.rb +0 -49
- data/lib/sequel_model/validations.rb +0 -429
- data/spec/sequel_model/plugins_spec.rb +0 -80
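Not part of the gem diff itself: the file moves above fold sequel_core and sequel_model into a single sequel tree (the old sequel_core.rb and sequel_model.rb appear to shrink to one-line stubs) and split model features into opt-in plugins. A rough usage sketch, with a hypothetical Post class and column, assuming the validation_helpers plugin API shown in the new plugins directory:

    require 'sequel'               # one library/namespace now loads core and model
    DB = Sequel.sqlite             # in-memory SQLite database

    class Post < Sequel::Model
      plugin :validation_helpers   # validations moved out of Model into a plugin

      def validate
        validates_presence :title
      end
    end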
@@ -62,12 +62,12 @@ module Sequel
     #   or not allowing NULL values (if false). If unspecified, will default
     #   to whatever the database default is.
     # * :on_delete - Specify the behavior of this column when being deleted.
-    #   See
+    #   See Dataset#on_delete_clause for options.
     # * :on_update - Specify the behavior of this column when being updated.
     #   See Schema::SQL#on_delete_clause for options.
     # * :size - The size of the column, generally used with string
     #   columns to specify the maximum number of characters the column will hold.
-    # * :unique - Mark the column
+    # * :unique - Mark the column as unique, generally has the same effect as
     #   creating a unique index on the column.
     # * :unsigned - Make the column type unsigned, only useful for integer
     #   columns.
@@ -177,12 +177,14 @@ module Sequel
 
     private
 
+    # Add a composite primary key constraint
     def composite_primary_key(columns, *args)
       opts = args.pop || {}
       @columns << {:type => :check, :constraint_type => :primary_key,
         :name => nil, :columns => columns}.merge(opts)
     end
 
+    # Add a composite foreign key constraint
     def composite_foreign_key(columns, opts)
       @columns << {:type => :check, :constraint_type => :foreign_key,
         :name => nil, :columns => columns }.merge(opts)
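For context (not part of the diff), a minimal sketch of how these private generator methods get exercised: passing an array of columns to primary_key or foreign_key dispatches to them. The table and column names here are hypothetical:

    DB.create_table(:order_items) do
      Integer :order_id
      Integer :item_id
      # array arguments produce composite constraints via the methods above
      primary_key [:order_id, :item_id]
      foreign_key [:order_id], :orders
    end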
@@ -304,11 +306,13 @@ module Sequel
 
     private
 
+    # Add a composite primary key constraint
     def add_composite_primary_key(columns, opts)
       @operations << {:op => :add_constraint, :type => :check,
         :constraint_type => :primary_key, :columns => columns}.merge(opts)
     end
 
+    # Add a composite foreign key constraint
     def add_composite_foreign_key(columns, table, opts)
       @operations << {:op => :add_constraint, :type => :check,
         :constraint_type => :foreign_key, :columns => columns,
@@ -21,8 +21,7 @@ module Sequel
       alter_table(table) {add_index(*args)}
     end
 
-    # Alters the given table with the specified block.
-    # available operations:
+    # Alters the given table with the specified block. Example:
     #
     #   DB.alter_table :items do
     #     add_column :category, :text, :default => 'ruby'
@@ -48,9 +47,9 @@ module Sequel
     # Creates a table with the columns given in the provided block:
     #
     #   DB.create_table :posts do
-    #     primary_key :id
+    #     primary_key :id
     #     column :title, :text
-    #
+    #     String :content
     #     index :title
     #   end
     #
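As a rough illustration (not from the diff), on a generic adapter the revised example maps through the new Database DDL helpers (data/lib/sequel/database/schema_sql.rb, reproduced further below) to something like:

    DB.create_table :posts do
      primary_key :id
      column :title, :text
      String :content   # ruby-class column methods use the TYPES map, String => varchar(255)
      index :title
    end
    # roughly:
    #   CREATE TABLE posts (id integer PRIMARY KEY AUTOINCREMENT, title text, content varchar(255))
    #   CREATE INDEX posts_title_index ON posts (title)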
@@ -60,7 +59,7 @@ module Sequel
       create_table_sql_list(name, *((options[:generator] || Schema::Generator.new(self, &block)).create_info << options)).flatten.each {|sql| execute_ddl(sql)}
     end
 
-    # Forcibly creates a table
+    # Forcibly creates a table, attempting to drop it unconditionally (and catching any errors), then creating it.
     def create_table!(name, options={}, &block)
       drop_table(name) rescue nil
       create_table(name, options, &block)
@@ -73,7 +72,7 @@ module Sequel
     def create_or_replace_view(name, source)
       remove_cached_schema(name)
       source = source.sql if source.is_a?(Dataset)
-      execute_ddl("CREATE OR REPLACE VIEW #{
+      execute_ddl("CREATE OR REPLACE VIEW #{quote_schema_table(name)} AS #{source}")
     end
 
     # Creates a view based on a dataset or an SQL string:
@@ -82,7 +81,7 @@ module Sequel
     #   DB.create_view(:ruby_items, DB[:items].filter(:category => 'ruby'))
     def create_view(name, source)
       source = source.sql if source.is_a?(Dataset)
-      execute_ddl("CREATE VIEW #{
+      execute_ddl("CREATE VIEW #{quote_schema_table(name)} AS #{source}")
     end
 
     # Removes a column from the specified table:
@@ -104,7 +103,7 @@ module Sequel
       alter_table(table) {drop_index(columns)}
     end
 
-    # Drops one or more tables corresponding to the given
+    # Drops one or more tables corresponding to the given names:
     #
     #   DB.drop_table(:posts, :comments)
     def drop_table(*names)
@@ -114,13 +113,13 @@ module Sequel
       end
     end
 
-    # Drops
+    # Drops one or more views corresponding to the given names:
     #
     #   DB.drop_view(:cheap_items)
     def drop_view(*names)
       names.each do |n|
         remove_cached_schema(n)
-        execute_ddl("DROP VIEW #{
+        execute_ddl("DROP VIEW #{quote_schema_table(n)}")
       end
     end
 
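A short usage sketch (not part of the diff) of the view methods touched above, assuming a hypothetical :items table:

    DB.create_view(:ruby_items, DB[:items].filter(:category => 'ruby'))
    # executes: CREATE VIEW ruby_items AS SELECT * FROM items WHERE (category = 'ruby')
    DB.drop_view(:ruby_items)
    # executes: DROP VIEW ruby_items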
@@ -129,8 +128,9 @@ module Sequel
     #   DB.tables #=> [:items]
     #   DB.rename_table :items, :old_items
     #   DB.tables #=> [:old_items]
-    def rename_table(
-
+    def rename_table(name, new_name)
+      remove_cached_schema(name)
+      execute_ddl(rename_table_sql(name, new_name))
     end
 
     # Renames a column in the specified table. This method expects the current
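For context (not from the diff), rename_table now delegates to rename_table_sql, added in the new schema_sql.rb below, and clears the cached schema for the old name:

    DB.rename_table(:items, :old_items)
    # executes: ALTER TABLE items RENAME TO old_items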
@@ -0,0 +1,224 @@
+module Sequel
+  class Database
+    AUTOINCREMENT = 'AUTOINCREMENT'.freeze
+    CASCADE = 'CASCADE'.freeze
+    COMMA_SEPARATOR = ', '.freeze
+    NO_ACTION = 'NO ACTION'.freeze
+    NOT_NULL = ' NOT NULL'.freeze
+    NULL = ' NULL'.freeze
+    PRIMARY_KEY = ' PRIMARY KEY'.freeze
+    RESTRICT = 'RESTRICT'.freeze
+    SET_DEFAULT = 'SET DEFAULT'.freeze
+    SET_NULL = 'SET NULL'.freeze
+    TYPES = Hash.new {|h, k| k}
+    TYPES.merge!(:double=>'double precision', String=>'varchar(255)',
+      Integer=>'integer', Fixnum=>'integer', Bignum=>'bigint',
+      Float=>'double precision', BigDecimal=>'numeric', Numeric=>'numeric',
+      Date=>'date', DateTime=>'timestamp', Time=>'timestamp', File=>'blob',
+      TrueClass=>'boolean', FalseClass=>'boolean')
+    UNDERSCORE = '_'.freeze
+    UNIQUE = ' UNIQUE'.freeze
+    UNSIGNED = ' UNSIGNED'.freeze
+
+    # Default serial primary key options.
+    def serial_primary_key_options
+      {:primary_key => true, :type => Integer, :auto_increment => true}
+    end
+
+    private
+
+    # The SQL to execute to modify the DDL for the given table name. op
+    # should be one of the operations returned by the AlterTableGenerator.
+    def alter_table_sql(table, op)
+      quoted_name = quote_identifier(op[:name]) if op[:name]
+      alter_table_op = case op[:op]
+      when :add_column
+        "ADD COLUMN #{column_definition_sql(op)}"
+      when :drop_column
+        "DROP COLUMN #{quoted_name}"
+      when :rename_column
+        "RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}"
+      when :set_column_type
+        "ALTER COLUMN #{quoted_name} TYPE #{type_literal(op)}"
+      when :set_column_default
+        "ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}"
+      when :set_column_null
+        "ALTER COLUMN #{quoted_name} #{op[:null] ? 'DROP' : 'SET'} NOT NULL"
+      when :add_index
+        return index_definition_sql(table, op)
+      when :drop_index
+        return drop_index_sql(table, op)
+      when :add_constraint
+        "ADD #{constraint_definition_sql(op)}"
+      when :drop_constraint
+        "DROP CONSTRAINT #{quoted_name}"
+      else
+        raise Error, "Unsupported ALTER TABLE operation"
+      end
+      "ALTER TABLE #{quote_schema_table(table)} #{alter_table_op}"
+    end
+
+    # Array of SQL DDL modification statements for the given table,
+    # corresponding to the DDL changes specified by the operations.
+    def alter_table_sql_list(table, operations)
+      operations.map{|op| alter_table_sql(table, op)}
+    end
+
+    # The SQL string specify the autoincrement property, generally used by
+    # primary keys.
+    def auto_increment_sql
+      AUTOINCREMENT
+    end
+
+    # SQL DDL fragment containing the column creation SQL for the given column.
+    def column_definition_sql(column)
+      return constraint_definition_sql(column) if column[:type] == :check
+      sql = "#{quote_identifier(column[:name])} #{type_literal(column)}"
+      sql << UNIQUE if column[:unique]
+      sql << NOT_NULL if column[:null] == false
+      sql << NULL if column[:null] == true
+      sql << " DEFAULT #{literal(column[:default])}" if column.include?(:default)
+      sql << PRIMARY_KEY if column[:primary_key]
+      sql << " #{auto_increment_sql}" if column[:auto_increment]
+      sql << column_references_sql(column) if column[:table]
+      sql
+    end
+
+    # SQL DDL fragment containing the column creation
+    # SQL for all given columns, used inside a CREATE TABLE block.
+    def column_list_sql(columns)
+      columns.map{|c| column_definition_sql(c)}.join(COMMA_SEPARATOR)
+    end
+
+    # SQL DDL fragment for column foreign key references
+    def column_references_sql(column)
+      sql = " REFERENCES #{quote_schema_table(column[:table])}"
+      sql << "(#{Array(column[:key]).map{|x| quote_identifier(x)}.join(COMMA_SEPARATOR)})" if column[:key]
+      sql << " ON DELETE #{on_delete_clause(column[:on_delete])}" if column[:on_delete]
+      sql << " ON UPDATE #{on_delete_clause(column[:on_update])}" if column[:on_update]
+      sql
+    end
+
+    # SQL DDL fragment specifying a constraint on a table.
+    def constraint_definition_sql(constraint)
+      sql = constraint[:name] ? "CONSTRAINT #{quote_identifier(constraint[:name])} " : ""
+      case constraint[:constraint_type]
+      when :primary_key
+        sql << "PRIMARY KEY #{literal(constraint[:columns])}"
+      when :foreign_key
+        sql << "FOREIGN KEY #{literal(constraint[:columns])}"
+        sql << column_references_sql(constraint)
+      when :unique
+        sql << "UNIQUE #{literal(constraint[:columns])}"
+      else
+        check = constraint[:check]
+        sql << "CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}"
+      end
+      sql
+    end
+
+    # Array of SQL DDL statements, the first for creating a table with the given
+    # name and column specifications, and the others for specifying indexes on
+    # the table.
+    def create_table_sql_list(name, columns, indexes = nil, options = {})
+      sql = ["CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})"]
+      sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?
+      sql
+    end
+
+    # Default index name for the table and columns, may be too long
+    # for certain databases.
+    def default_index_name(table_name, columns)
+      schema, table = schema_and_table(table_name)
+      "#{"#{schema}_" if schema and schema != default_schema}#{table}_#{columns.map{|c| [String, Symbol].any?{|cl| c.is_a?(cl)} ? c : literal(c).gsub(/\W/, '_')}.join(UNDERSCORE)}_index"
+    end
+
+    # The SQL to drop an index for the table.
+    def drop_index_sql(table, op)
+      "DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}"
+    end
+
+    # SQL DDL statement to drop the table with the given name.
+    def drop_table_sql(name)
+      "DROP TABLE #{quote_schema_table(name)}"
+    end
+
+    # Proxy the filter_expr call to the dataset, used for creating constraints.
+    def filter_expr(*args, &block)
+      schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))
+    end
+
+    # SQL DDL statement for creating an index for the table with the given name
+    # and index specifications.
+    def index_definition_sql(table_name, index)
+      index_name = index[:name] || default_index_name(table_name, index[:columns])
+      if index[:type]
+        raise Error, "Index types are not supported for this database"
+      elsif index[:where]
+        raise Error, "Partial indexes are not supported for this database"
+      else
+        "CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_identifier(table_name)} #{literal(index[:columns])}"
+      end
+    end
+
+    # Array of SQL DDL statements, one for each index specification,
+    # for the given table.
+    def index_list_sql_list(table_name, indexes)
+      indexes.map{|i| index_definition_sql(table_name, i)}
+    end
+
+    # SQL DDL ON DELETE fragment to use, based on the given action.
+    # The following actions are recognized:
+    #
+    # * :cascade - Delete rows referencing this row.
+    # * :no_action (default) - Raise an error if other rows reference this
+    #   row, allow deferring of the integrity check.
+    # * :restrict - Raise an error if other rows reference this row,
+    #   but do not allow deferring the integrity check.
+    # * :set_default - Set columns referencing this row to their default value.
+    # * :set_null - Set columns referencing this row to NULL.
+    def on_delete_clause(action)
+      case action
+      when :restrict
+        RESTRICT
+      when :cascade
+        CASCADE
+      when :set_null
+        SET_NULL
+      when :set_default
+        SET_DEFAULT
+      else
+        NO_ACTION
+      end
+    end
+
+    # Proxy the quote_schema_table method to the dataset
+    def quote_schema_table(table)
+      schema_utility_dataset.quote_schema_table(table)
+    end
+
+    # Proxy the quote_identifier method to the dataset, used for quoting tables and columns.
+    def quote_identifier(v)
+      schema_utility_dataset.quote_identifier(v)
+    end
+
+    # SQL DDL statement for renaming a table.
+    def rename_table_sql(name, new_name)
+      "ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}"
+    end
+
+    # SQL fragment specifying the type of a given column.
+    def type_literal(column)
+      type = type_literal_base(column)
+      column[:size] ||= 255 if type.to_s == 'varchar'
+      elements = column[:size] || column[:elements]
+      "#{type}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}"
+    end
+
+    # SQL fragment specifying the base type of a given column,
+    # without the size or elements.
+    def type_literal_base(column)
+      TYPES[column[:type]]
+    end
+  end
+end
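A rough sketch (not from the diff) of how these private helpers are exercised; the table and column names are hypothetical and the exact SQL is adapter-dependent:

    DB.alter_table(:items) do
      add_column :category, String, :default => 'ruby', :null => false
      add_foreign_key :user_id, :users, :on_delete => :cascade
    end
    # alter_table_sql turns each generator op into roughly:
    #   ALTER TABLE items ADD COLUMN category varchar(255) NOT NULL DEFAULT 'ruby'
    #   ALTER TABLE items ADD COLUMN user_id integer REFERENCES users ON DELETE CASCADE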
@@ -1,27 +1,22 @@
-%w'callback convenience pagination prepared_statements query schema sql'.each do |f|
-  require "sequel_core/dataset/#{f}"
-end
-
 module Sequel
-  # A
-  #
+  # A dataset represents an SQL query, or more generally, an abstract
+  # set of rows in the database. Datasets
   # can be used to create, retrieve, update and delete records.
   #
   # Query results are always retrieved on demand, so a dataset can be kept
-  # around and reused indefinitely:
+  # around and reused indefinitely (datasets never cache results):
   #
   #   my_posts = DB[:posts].filter(:author => 'david') # no records are retrieved
-  #
-  #
-  #   p my_posts.all # records are retrieved again
+  #   my_posts.all # records are retrieved
+  #   my_posts.all # records are retrieved again
   #
-  #
-  #
-  # use different datasets to access data:
+  # Most dataset methods return modified copies of the dataset (functional style), so you can
+  # reuse different datasets to access data:
   #
   #   posts = DB[:posts]
   #   davids_posts = posts.filter(:author => 'david')
   #   old_posts = posts.filter('stamp < ?', Date.today - 7)
+  #   davids_old_posts = davids_posts.filter('stamp < ?', Date.today - 7)
   #
   # Datasets are Enumerable objects, so they can be manipulated using any
   # of the Enumerable methods, such as map, inject, etc.
@@ -37,15 +32,14 @@ module Sequel
   # This methods are shortcuts to join_table with the join type
   # already specified.
   class Dataset
+    extend Metaprogramming
+    include Metaprogramming
     include Enumerable
 
     # The dataset options that require the removal of cached columns
     # if changed.
     COLUMN_CHANGE_OPTS = [:select, :sql, :from, :join].freeze
 
-    # Array of all subclasses of Dataset
-    DATASET_CLASSES = []
-
     # All methods that should have a ! method added that modifies
     # the receiver.
     MUTATION_METHODS = %w'add_graph_aliases and distinct exclude exists
@@ -53,21 +47,10 @@ module Sequel
       group group_and_count group_by having inner_join intersect invert join
       left_outer_join limit naked or order order_by order_more paginate query reject
       reverse reverse_order right_outer_join select select_all select_more
-      set_defaults set_graph_aliases
+      set_defaults set_graph_aliases set_overrides sort sort_by
       unfiltered union unordered where with_sql'.collect{|x| x.to_sym}
 
     NOTIMPL_MSG = "This method must be overridden in Sequel adapters".freeze
-    STOCK_TRANSFORMS = {
-      :marshal => [
-        # for backwards-compatibility we support also non-base64-encoded values.
-        proc {|v| Marshal.load(v.unpack('m')[0]) rescue Marshal.load(v)},
-        proc {|v| [Marshal.dump(v)].pack('m')}
-      ],
-      :yaml => [
-        proc {|v| YAML.load v if v},
-        proc {|v| v.to_yaml}
-      ]
-    }
 
     # The database that corresponds to this dataset
     attr_accessor :db
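For context (not part of the diff), MUTATION_METHODS drives the generated bang variants; with a hypothetical posts dataset:

    ds = DB[:posts]
    ds.filter!(:author => 'david')        # replaces ds's options in place
    ds.set_overrides!(:topic => 'ruby')   # set_overrides/sort/sort_by now get ! variants too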
@@ -85,21 +68,18 @@ module Sequel
     attr_writer :quote_identifiers
 
     # The row_proc for this database, should be a Proc that takes
-    # a single hash argument and returns the object you want
-    #
+    # a single hash argument and returns the object you want
+    # each to return.
     attr_accessor :row_proc
 
-    # Constructs a new instance
-    # options. Datasets are usually constructed by invoking Database
+    # Constructs a new Dataset instance with an associated database and
+    # options. Datasets are usually constructed by invoking the Database#[] method:
     #
     #   DB[:posts]
     #
-    # Or:
-    #
-    #   DB.dataset # the returned dataset is blank
-    #
     # Sequel::Dataset is an abstract class that is not useful by itself. Each
-    # database adaptor should provide a
+    # database adaptor should provide a subclass of Sequel::Dataset, and have
+    # the Database#dataset method return an instance of that class.
     def initialize(db, opts = nil)
       @db = db
       @quote_identifiers = db.quote_identifiers? if db.respond_to?(:quote_identifiers?)
@@ -112,11 +92,6 @@ module Sequel
 
     ### Class Methods ###
 
-    # The array of dataset subclasses.
-    def self.dataset_classes
-      DATASET_CLASSES
-    end
-
     # Setup mutation (e.g. filter!) methods. These operate the same as the
     # non-! methods, but replace the options of the current dataset with the
     # options of the resulting dataset.
@@ -126,11 +101,6 @@ module Sequel
       end
     end
 
-    # Add the subclass to the array of subclasses.
-    def self.inherited(c)
-      DATASET_CLASSES << c
-    end
-
     ### Instance Methods ###
 
     # Alias for insert, but not aliased directly so subclasses
@@ -139,17 +109,19 @@ module Sequel
       insert(*args)
     end
 
-    # Return the dataset as
-    #
+    # Return the dataset as an aliased expression with the given alias. You can
+    # use this as a FROM or JOIN dataset, or as a column if this dataset
+    # returns a single row and column.
     def as(aliaz)
       ::Sequel::SQL::AliasedExpression.new(self, aliaz)
     end
 
     # Returns an array with all records in the dataset. If a block is given,
     # the array is iterated over after all items have been loaded.
-    def all(opts = nil, &block)
+    def all(opts = (defarg=true;nil), &block)
+      Deprecation.deprecate("Calling Dataset#all with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).all.") unless defarg
       a = []
-      each(opts)
+      defarg ? each{|r| a << r} : each(opts){|r| a << r}
       post_load(a)
       a.each(&block) if block
       a
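Not part of the diff, but the migration path the deprecation message points at, using a hypothetical opts hash:

    DB[:posts].all(:select => [:id])          # deprecated in 2.12, removed in 3.0
    DB[:posts].clone(:select => [:id]).all    # equivalent, preferred form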
@@ -165,16 +137,19 @@ module Sequel
       c
     end
 
-    # Returns the columns in the result set in
+    # Returns the columns in the result set in order.
     # If the columns are currently cached, returns the cached value. Otherwise,
     # a SELECT query is performed to get a single row. Adapters are expected
     # to fill the columns cache with the column information when a query is performed.
-    # If the dataset does not have any rows, this
-    #
+    # If the dataset does not have any rows, this may be an empty array depending on how
+    # the adapter is programmed.
+    #
+    # If you are looking for all columns for a single table and maybe some information about
+    # each column (e.g. type), see Database#schema.
     def columns
       return @columns if @columns
-      ds = unfiltered.unordered.clone(:distinct => nil)
-      ds.
+      ds = unfiltered.unordered.clone(:distinct => nil, :limit => 1)
+      ds.each{break}
       @columns = ds.instance_variable_get(:@columns)
       @columns || []
     end
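A small usage note (not from the diff), with a hypothetical :posts table:

    DB[:posts].columns   # => e.g. [:id, :title, :content]; issues a single-row SELECT if uncached
    DB.schema(:posts)    # per the new comment, use this when you also want per-column type info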
@@ -195,29 +170,30 @@ module Sequel
 
     # Deletes the records in the dataset. The returned value is generally the
     # number of records deleted, but that is adapter dependent.
-    def delete(
-
+    def delete(opts=(defarg=true;nil))
+      Deprecation.deprecate("Calling Dataset#delete with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).delete.") unless defarg
+      execute_dui(defarg ? delete_sql : delete_sql(opts))
     end
 
-    # Iterates over the records in the dataset
-    #
-    def each(opts = nil, &block)
+    # Iterates over the records in the dataset as they are yielded from the
+    # database adapter, and returns self.
+    def each(opts = (defarg=true;nil), &block)
+      Deprecation.deprecate("Calling Dataset#each with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).each.") unless defarg
       if opts && opts.keys.any?{|o| COLUMN_CHANGE_OPTS.include?(o)}
         prev_columns = @columns
         begin
-          _each(opts, &block)
+          defarg ? _each(&block) : _each(opts, &block)
         ensure
           @columns = prev_columns
         end
       else
-        _each(opts, &block)
+        defarg ? _each(&block) : _each(opts, &block)
       end
       self
     end
 
     # Executes a select query and fetches records, passing each record to the
-    # supplied block. The yielded records
-    # but that is adapter dependent.
+    # supplied block. The yielded records should be hashes with symbol keys.
     def fetch_rows(sql, &block)
       raise NotImplementedError, NOTIMPL_MSG
     end
@@ -234,24 +210,12 @@ module Sequel
       "#<#{self.class}: #{sql.inspect}>"
     end
 
-    # Returns the the model classes associated with the dataset as a hash.
-    # If the dataset is associated with a single model class, a key of nil
-    # is used. For datasets with polymorphic models, the keys are
-    # values of the polymorphic column and the values are the corresponding
-    # model classes to which they map.
-    def model_classes
-      @opts[:models]
-    end
-
     # Returns a naked dataset clone - i.e. a dataset that returns records as
-    # hashes
+    # hashes instead of calling the row proc.
     def naked
-      clone
-
-
-    # Returns the column name for the polymorphic key.
-    def polymorphic_key
-      @opts[:polymorphic_key]
+      ds = clone
+      ds.row_proc = nil
+      ds
     end
 
     # Whether this dataset quotes identifiers.
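Not part of the diff; a quick sketch of the new naked behavior, assuming a hypothetical Post model:

    Post.dataset.first         # row_proc wraps the hash in a Post instance
    Post.dataset.naked.first   # plain hash, e.g. {:id => 1, :title => '...'}; row_proc is nil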
@@ -260,8 +224,8 @@ module Sequel
     end
 
     # Set the server for this dataset to use. Used to pick a specific database
-    # shard to run a query against, or to override the default SELECT uses
-    # :read_only database and all other queries use the :default database.
+    # shard to run a query against, or to override the default (which is SELECT uses
+    # :read_only database and all other queries use the :default database).
     def server(servr)
       clone(:server=>servr)
     end
@@ -278,164 +242,17 @@ module Sequel
       clone(:defaults=>(@opts[:defaults]||{}).merge(hash))
     end
 
-    # Associates or disassociates the dataset with a model(s). If
-    # nil is specified, the dataset is turned into a naked dataset and returns
-    # records as hashes. If a model class specified, the dataset is modified
-    # to return records as instances of the model class, e.g:
-    #
-    #   class MyModel
-    #     def initialize(values)
-    #       @values = values
-    #       ...
-    #     end
-    #   end
-    #
-    #   dataset.set_model(MyModel)
-    #
-    # You can also provide additional arguments to be passed to the model's
-    # initialize method:
-    #
-    #   class MyModel
-    #     def initialize(values, options)
-    #       @values = values
-    #       ...
-    #     end
-    #   end
-    #
-    #   dataset.set_model(MyModel, :allow_delete => false)
-    #
-    # The dataset can be made polymorphic by specifying a column name as the
-    # polymorphic key and a hash mapping column values to model classes.
-    #
-    #   dataset.set_model(:kind, {1 => Person, 2 => Business})
-    #
-    # You can also set a default model class to fall back on by specifying a
-    # class corresponding to nil:
-    #
-    #   dataset.set_model(:kind, {nil => DefaultClass, 1 => Person, 2 => Business})
-    #
-    # To make sure that there is always a default model class, the hash provided
-    # should have a default value. To make the dataset map string values to
-    # model classes, and keep a good default, try:
-    #
-    #   dataset.set_model(:kind, Hash.new{|h,k| h[k] = (k.constantize rescue DefaultClass)})
-    def set_model(key, *args)
-      # This code is more verbose then necessary for performance reasons
-      case key
-      when nil # set_model(nil) => no argument provided, so the dataset is denuded
-        @opts.merge!(:naked => true, :models => nil, :polymorphic_key => nil)
-        self.row_proc = nil
-      when Class
-        # isomorphic model
-        @opts.merge!(:naked => nil, :models => {nil => key}, :polymorphic_key => nil)
-        if key.respond_to?(:load)
-          # the class has a values setter method, so we use it
-          self.row_proc = proc{|h| key.load(h, *args)}
-        else
-          # otherwise we just pass the hash to the constructor
-          self.row_proc = proc{|h| key.new(h, *args)}
-        end
-      when Symbol
-        # polymorphic model
-        hash = args.shift || raise(ArgumentError, "No class hash supplied for polymorphic model")
-        @opts.merge!(:naked => true, :models => hash, :polymorphic_key => key)
-        if (hash.empty? ? (hash[nil] rescue nil) : hash.values.first).respond_to?(:load)
-          # the class has a values setter method, so we use it
-          self.row_proc = proc do |h|
-            c = hash[h[key]] || hash[nil] || \
-              raise(Error, "No matching model class for record (#{polymorphic_key} => #{h[polymorphic_key].inspect})")
-            c.load(h, *args)
-          end
-        else
-          # otherwise we just pass the hash to the constructor
-          self.row_proc = proc do |h|
-            c = hash[h[key]] || hash[nil] || \
-              raise(Error, "No matching model class for record (#{polymorphic_key} => #{h[polymorphic_key].inspect})")
-            c.new(h, *args)
-          end
-        end
-      else
-        raise ArgumentError, "Invalid model specified"
-      end
-      self
-    end
-
     # Set values that override hash arguments given to insert and update statements.
     # This hash is merged into the hash provided to insert or update.
     def set_overrides(hash)
       clone(:overrides=>hash.merge(@opts[:overrides]||{}))
     end
 
-    # Sets a value transform which is used to convert values loaded and saved
-    # to/from the database. The transform should be supplied as a hash. Each
-    # value in the hash should be an array containing two proc objects - one
-    # for transforming loaded values, and one for transforming saved values.
-    # The following example demonstrates how to store Ruby objects in a dataset
-    # using Marshal serialization:
-    #
-    #   dataset.transform(:obj => [
-    #     proc {|v| Marshal.load(v)},
-    #     proc {|v| Marshal.dump(v)}
-    #   ])
-    #
-    #   dataset.insert_sql(:obj => 1234) #=>
-    #   "INSERT INTO items (obj) VALUES ('\004\bi\002\322\004')"
-    #
-    # Another form of using transform is by specifying stock transforms:
-    #
-    #   dataset.transform(:obj => :marshal)
-    #
-    # The currently supported stock transforms are :marshal and :yaml.
-    def transform(t)
-      @transform = t
-      t.each do |k, v|
-        case v
-        when Array
-          if (v.size != 2) || !v.first.is_a?(Proc) && !v.last.is_a?(Proc)
-            raise Error::InvalidTransform, "Invalid transform specified"
-          end
-        else
-          unless v = STOCK_TRANSFORMS[v]
-            raise Error::InvalidTransform, "Invalid transform specified"
-          else
-            t[k] = v
-          end
-        end
-      end
-      self
-    end
-
-    # Applies the value transform for data loaded from the database.
-    def transform_load(r)
-      r.inject({}) do |m, kv|
-        k, v = *kv
-        m[k] = (tt = @transform[k]) ? tt[0][v] : v
-        m
-      end
-    end
-
-    # Applies the value transform for data saved to the database.
-    def transform_save(r)
-      r.inject({}) do |m, kv|
-        k, v = *kv
-        m[k] = (tt = @transform[k]) ? tt[1][v] : v
-        m
-      end
-    end
-
-    def upcase_identifiers=(v)
-      @identifier_input_method = v ? :upcase : nil
-    end
-
-    # Whether this dataset upcases identifiers.
-    def upcase_identifiers?
-      @identifier_input_method == :upcase
-    end
-
     # Updates values for the dataset. The returned value is generally the
     # number of rows updated, but that is adapter dependent.
-    def update(
-
+    def update(values={}, opts=(defarg=true;nil))
+      Deprecation.deprecate("Calling Dataset#update with an argument is deprecated and will raise an error in Sequel 3.0. Use dataset.clone(opts).update.") unless defarg
+      execute_dui(defarg ? update_sql(values) : update_sql(value, opts))
    end
 
     # Add the mutation methods via metaprogramming
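Not from the diff: the transform and set_model functionality removed above moves into model plugins (see data/lib/sequel/plugins/serialization.rb in the file list); roughly, and assuming the plugin's argument order of format then column names:

    class Post < Sequel::Model
      # replaces dataset.transform(:metadata => :marshal)
      plugin :serialization, :marshal, :metadata
    end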
@@ -453,13 +270,13 @@ module Sequel
     # Runs #graph_each if graphing. Otherwise, iterates through the records
     # yielded by #fetch_rows, applying any row_proc or transform if necessary,
     # and yielding the result.
-    def _each(opts, &block)
+    def _each(opts=(defarg=true;nil), &block)
       if @opts[:graph] and !(opts && opts[:graph] == false)
-        graph_each(opts, &block)
+        defarg ? graph_each(&block) : graph_each(opts, &block)
       else
         row_proc = @row_proc unless opts && opts[:naked]
         transform = @transform
-        fetch_rows(select_sql(opts)) do |r|
+        fetch_rows(defarg ? select_sql : select_sql(opts)) do |r|
           r = transform_load(r) if transform
           r = row_proc[r] if row_proc
           yield r
@@ -507,5 +324,30 @@ module Sequel
     def output_identifier(v)
       (i = identifier_output_method) ? v.to_s.send(i).to_sym : v.to_sym
     end
+
+    # This is run inside .all, after all of the records have been loaded
+    # via .each, but before any block passed to all is called. It is called with
+    # a single argument, an array of all returned records. Does nothing by
+    # default, added to make the model eager loading code simpler.
+    def post_load(all_records)
+    end
+
+    # If a block argument is passed to a method that uses a VirtualRow,
+    # yield a new VirtualRow instance to the block if it accepts a single
+    # argument. Otherwise, evaluate the block in the context of a new
+    # VirtualRow instance.
+    def virtual_row_block_call(block)
+      return unless block
+      unless Sequel.virtual_row_instance_eval
+        Deprecation.deprecate('Using a VirtualRow block without an argument is deprecated, and its meaning will change in Sequel 3.0. Add a block argument to keep the old semantics, or set Sequel.virtual_row_instance_eval = true to use instance_eval for VirtualRow blocks without arguments.') unless block.arity == 1
+        return block.call(SQL::VirtualRow.new)
+      end
+      case block.arity
+      when -1, 0
+        SQL::VirtualRow.new.instance_eval(&block)
+      else
+        block.call(SQL::VirtualRow.new)
+      end
+    end
   end
 end