viking-sequel 3.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/CHANGELOG +3134 -0
- data/COPYING +19 -0
- data/README.rdoc +723 -0
- data/Rakefile +193 -0
- data/bin/sequel +196 -0
- data/doc/advanced_associations.rdoc +644 -0
- data/doc/cheat_sheet.rdoc +218 -0
- data/doc/dataset_basics.rdoc +106 -0
- data/doc/dataset_filtering.rdoc +158 -0
- data/doc/opening_databases.rdoc +296 -0
- data/doc/prepared_statements.rdoc +104 -0
- data/doc/reflection.rdoc +84 -0
- data/doc/release_notes/1.0.txt +38 -0
- data/doc/release_notes/1.1.txt +143 -0
- data/doc/release_notes/1.3.txt +101 -0
- data/doc/release_notes/1.4.0.txt +53 -0
- data/doc/release_notes/1.5.0.txt +155 -0
- data/doc/release_notes/2.0.0.txt +298 -0
- data/doc/release_notes/2.1.0.txt +271 -0
- data/doc/release_notes/2.10.0.txt +328 -0
- data/doc/release_notes/2.11.0.txt +215 -0
- data/doc/release_notes/2.12.0.txt +534 -0
- data/doc/release_notes/2.2.0.txt +253 -0
- data/doc/release_notes/2.3.0.txt +88 -0
- data/doc/release_notes/2.4.0.txt +106 -0
- data/doc/release_notes/2.5.0.txt +137 -0
- data/doc/release_notes/2.6.0.txt +157 -0
- data/doc/release_notes/2.7.0.txt +166 -0
- data/doc/release_notes/2.8.0.txt +171 -0
- data/doc/release_notes/2.9.0.txt +97 -0
- data/doc/release_notes/3.0.0.txt +221 -0
- data/doc/release_notes/3.1.0.txt +406 -0
- data/doc/release_notes/3.10.0.txt +286 -0
- data/doc/release_notes/3.2.0.txt +268 -0
- data/doc/release_notes/3.3.0.txt +192 -0
- data/doc/release_notes/3.4.0.txt +325 -0
- data/doc/release_notes/3.5.0.txt +510 -0
- data/doc/release_notes/3.6.0.txt +366 -0
- data/doc/release_notes/3.7.0.txt +179 -0
- data/doc/release_notes/3.8.0.txt +151 -0
- data/doc/release_notes/3.9.0.txt +233 -0
- data/doc/schema.rdoc +36 -0
- data/doc/sharding.rdoc +113 -0
- data/doc/virtual_rows.rdoc +205 -0
- data/lib/sequel.rb +1 -0
- data/lib/sequel/adapters/ado.rb +90 -0
- data/lib/sequel/adapters/ado/mssql.rb +30 -0
- data/lib/sequel/adapters/amalgalite.rb +176 -0
- data/lib/sequel/adapters/db2.rb +139 -0
- data/lib/sequel/adapters/dbi.rb +113 -0
- data/lib/sequel/adapters/do.rb +188 -0
- data/lib/sequel/adapters/do/mysql.rb +49 -0
- data/lib/sequel/adapters/do/postgres.rb +91 -0
- data/lib/sequel/adapters/do/sqlite.rb +40 -0
- data/lib/sequel/adapters/firebird.rb +283 -0
- data/lib/sequel/adapters/informix.rb +77 -0
- data/lib/sequel/adapters/jdbc.rb +587 -0
- data/lib/sequel/adapters/jdbc/as400.rb +58 -0
- data/lib/sequel/adapters/jdbc/h2.rb +133 -0
- data/lib/sequel/adapters/jdbc/mssql.rb +57 -0
- data/lib/sequel/adapters/jdbc/mysql.rb +78 -0
- data/lib/sequel/adapters/jdbc/oracle.rb +50 -0
- data/lib/sequel/adapters/jdbc/postgresql.rb +108 -0
- data/lib/sequel/adapters/jdbc/sqlite.rb +55 -0
- data/lib/sequel/adapters/mysql.rb +421 -0
- data/lib/sequel/adapters/odbc.rb +143 -0
- data/lib/sequel/adapters/odbc/mssql.rb +42 -0
- data/lib/sequel/adapters/openbase.rb +64 -0
- data/lib/sequel/adapters/oracle.rb +131 -0
- data/lib/sequel/adapters/postgres.rb +504 -0
- data/lib/sequel/adapters/shared/mssql.rb +490 -0
- data/lib/sequel/adapters/shared/mysql.rb +498 -0
- data/lib/sequel/adapters/shared/oracle.rb +195 -0
- data/lib/sequel/adapters/shared/postgres.rb +830 -0
- data/lib/sequel/adapters/shared/progress.rb +44 -0
- data/lib/sequel/adapters/shared/sqlite.rb +389 -0
- data/lib/sequel/adapters/sqlite.rb +224 -0
- data/lib/sequel/adapters/utils/stored_procedures.rb +84 -0
- data/lib/sequel/connection_pool.rb +99 -0
- data/lib/sequel/connection_pool/sharded_single.rb +84 -0
- data/lib/sequel/connection_pool/sharded_threaded.rb +211 -0
- data/lib/sequel/connection_pool/single.rb +29 -0
- data/lib/sequel/connection_pool/threaded.rb +150 -0
- data/lib/sequel/core.rb +293 -0
- data/lib/sequel/core_sql.rb +241 -0
- data/lib/sequel/database.rb +1079 -0
- data/lib/sequel/database/schema_generator.rb +327 -0
- data/lib/sequel/database/schema_methods.rb +203 -0
- data/lib/sequel/database/schema_sql.rb +320 -0
- data/lib/sequel/dataset.rb +32 -0
- data/lib/sequel/dataset/actions.rb +441 -0
- data/lib/sequel/dataset/features.rb +86 -0
- data/lib/sequel/dataset/graph.rb +254 -0
- data/lib/sequel/dataset/misc.rb +119 -0
- data/lib/sequel/dataset/mutation.rb +64 -0
- data/lib/sequel/dataset/prepared_statements.rb +227 -0
- data/lib/sequel/dataset/query.rb +709 -0
- data/lib/sequel/dataset/sql.rb +996 -0
- data/lib/sequel/exceptions.rb +51 -0
- data/lib/sequel/extensions/blank.rb +43 -0
- data/lib/sequel/extensions/inflector.rb +242 -0
- data/lib/sequel/extensions/looser_typecasting.rb +21 -0
- data/lib/sequel/extensions/migration.rb +239 -0
- data/lib/sequel/extensions/named_timezones.rb +61 -0
- data/lib/sequel/extensions/pagination.rb +100 -0
- data/lib/sequel/extensions/pretty_table.rb +82 -0
- data/lib/sequel/extensions/query.rb +52 -0
- data/lib/sequel/extensions/schema_dumper.rb +271 -0
- data/lib/sequel/extensions/sql_expr.rb +122 -0
- data/lib/sequel/extensions/string_date_time.rb +46 -0
- data/lib/sequel/extensions/thread_local_timezones.rb +48 -0
- data/lib/sequel/metaprogramming.rb +9 -0
- data/lib/sequel/model.rb +120 -0
- data/lib/sequel/model/associations.rb +1514 -0
- data/lib/sequel/model/base.rb +1069 -0
- data/lib/sequel/model/default_inflections.rb +45 -0
- data/lib/sequel/model/errors.rb +39 -0
- data/lib/sequel/model/exceptions.rb +21 -0
- data/lib/sequel/model/inflections.rb +162 -0
- data/lib/sequel/model/plugins.rb +70 -0
- data/lib/sequel/plugins/active_model.rb +59 -0
- data/lib/sequel/plugins/association_dependencies.rb +103 -0
- data/lib/sequel/plugins/association_proxies.rb +41 -0
- data/lib/sequel/plugins/boolean_readers.rb +53 -0
- data/lib/sequel/plugins/caching.rb +141 -0
- data/lib/sequel/plugins/class_table_inheritance.rb +214 -0
- data/lib/sequel/plugins/composition.rb +138 -0
- data/lib/sequel/plugins/force_encoding.rb +72 -0
- data/lib/sequel/plugins/hook_class_methods.rb +126 -0
- data/lib/sequel/plugins/identity_map.rb +116 -0
- data/lib/sequel/plugins/instance_filters.rb +98 -0
- data/lib/sequel/plugins/instance_hooks.rb +57 -0
- data/lib/sequel/plugins/lazy_attributes.rb +77 -0
- data/lib/sequel/plugins/many_through_many.rb +208 -0
- data/lib/sequel/plugins/nested_attributes.rb +206 -0
- data/lib/sequel/plugins/optimistic_locking.rb +81 -0
- data/lib/sequel/plugins/rcte_tree.rb +281 -0
- data/lib/sequel/plugins/schema.rb +66 -0
- data/lib/sequel/plugins/serialization.rb +166 -0
- data/lib/sequel/plugins/single_table_inheritance.rb +74 -0
- data/lib/sequel/plugins/subclasses.rb +45 -0
- data/lib/sequel/plugins/tactical_eager_loading.rb +61 -0
- data/lib/sequel/plugins/timestamps.rb +87 -0
- data/lib/sequel/plugins/touch.rb +118 -0
- data/lib/sequel/plugins/typecast_on_load.rb +72 -0
- data/lib/sequel/plugins/validation_class_methods.rb +405 -0
- data/lib/sequel/plugins/validation_helpers.rb +223 -0
- data/lib/sequel/sql.rb +1020 -0
- data/lib/sequel/timezones.rb +161 -0
- data/lib/sequel/version.rb +12 -0
- data/lib/sequel_core.rb +1 -0
- data/lib/sequel_model.rb +1 -0
- data/spec/adapters/firebird_spec.rb +407 -0
- data/spec/adapters/informix_spec.rb +97 -0
- data/spec/adapters/mssql_spec.rb +403 -0
- data/spec/adapters/mysql_spec.rb +1019 -0
- data/spec/adapters/oracle_spec.rb +286 -0
- data/spec/adapters/postgres_spec.rb +969 -0
- data/spec/adapters/spec_helper.rb +51 -0
- data/spec/adapters/sqlite_spec.rb +432 -0
- data/spec/core/connection_pool_spec.rb +808 -0
- data/spec/core/core_sql_spec.rb +417 -0
- data/spec/core/database_spec.rb +1662 -0
- data/spec/core/dataset_spec.rb +3827 -0
- data/spec/core/expression_filters_spec.rb +595 -0
- data/spec/core/object_graph_spec.rb +296 -0
- data/spec/core/schema_generator_spec.rb +159 -0
- data/spec/core/schema_spec.rb +830 -0
- data/spec/core/spec_helper.rb +56 -0
- data/spec/core/version_spec.rb +7 -0
- data/spec/extensions/active_model_spec.rb +76 -0
- data/spec/extensions/association_dependencies_spec.rb +127 -0
- data/spec/extensions/association_proxies_spec.rb +50 -0
- data/spec/extensions/blank_spec.rb +67 -0
- data/spec/extensions/boolean_readers_spec.rb +92 -0
- data/spec/extensions/caching_spec.rb +250 -0
- data/spec/extensions/class_table_inheritance_spec.rb +252 -0
- data/spec/extensions/composition_spec.rb +194 -0
- data/spec/extensions/force_encoding_spec.rb +117 -0
- data/spec/extensions/hook_class_methods_spec.rb +470 -0
- data/spec/extensions/identity_map_spec.rb +202 -0
- data/spec/extensions/inflector_spec.rb +181 -0
- data/spec/extensions/instance_filters_spec.rb +55 -0
- data/spec/extensions/instance_hooks_spec.rb +133 -0
- data/spec/extensions/lazy_attributes_spec.rb +153 -0
- data/spec/extensions/looser_typecasting_spec.rb +39 -0
- data/spec/extensions/many_through_many_spec.rb +884 -0
- data/spec/extensions/migration_spec.rb +332 -0
- data/spec/extensions/named_timezones_spec.rb +72 -0
- data/spec/extensions/nested_attributes_spec.rb +396 -0
- data/spec/extensions/optimistic_locking_spec.rb +100 -0
- data/spec/extensions/pagination_spec.rb +99 -0
- data/spec/extensions/pretty_table_spec.rb +91 -0
- data/spec/extensions/query_spec.rb +85 -0
- data/spec/extensions/rcte_tree_spec.rb +205 -0
- data/spec/extensions/schema_dumper_spec.rb +357 -0
- data/spec/extensions/schema_spec.rb +127 -0
- data/spec/extensions/serialization_spec.rb +209 -0
- data/spec/extensions/single_table_inheritance_spec.rb +96 -0
- data/spec/extensions/spec_helper.rb +91 -0
- data/spec/extensions/sql_expr_spec.rb +89 -0
- data/spec/extensions/string_date_time_spec.rb +93 -0
- data/spec/extensions/subclasses_spec.rb +52 -0
- data/spec/extensions/tactical_eager_loading_spec.rb +65 -0
- data/spec/extensions/thread_local_timezones_spec.rb +45 -0
- data/spec/extensions/timestamps_spec.rb +150 -0
- data/spec/extensions/touch_spec.rb +155 -0
- data/spec/extensions/typecast_on_load_spec.rb +69 -0
- data/spec/extensions/validation_class_methods_spec.rb +984 -0
- data/spec/extensions/validation_helpers_spec.rb +438 -0
- data/spec/integration/associations_test.rb +281 -0
- data/spec/integration/database_test.rb +26 -0
- data/spec/integration/dataset_test.rb +963 -0
- data/spec/integration/eager_loader_test.rb +734 -0
- data/spec/integration/model_test.rb +130 -0
- data/spec/integration/plugin_test.rb +814 -0
- data/spec/integration/prepared_statement_test.rb +213 -0
- data/spec/integration/schema_test.rb +361 -0
- data/spec/integration/spec_helper.rb +73 -0
- data/spec/integration/timezone_test.rb +55 -0
- data/spec/integration/transaction_test.rb +122 -0
- data/spec/integration/type_test.rb +96 -0
- data/spec/model/association_reflection_spec.rb +175 -0
- data/spec/model/associations_spec.rb +2633 -0
- data/spec/model/base_spec.rb +418 -0
- data/spec/model/dataset_methods_spec.rb +78 -0
- data/spec/model/eager_loading_spec.rb +1391 -0
- data/spec/model/hooks_spec.rb +240 -0
- data/spec/model/inflector_spec.rb +26 -0
- data/spec/model/model_spec.rb +593 -0
- data/spec/model/plugins_spec.rb +236 -0
- data/spec/model/record_spec.rb +1500 -0
- data/spec/model/spec_helper.rb +97 -0
- data/spec/model/validations_spec.rb +153 -0
- data/spec/rcov.opts +6 -0
- data/spec/spec_config.rb.example +10 -0
- metadata +346 -0
@@ -0,0 +1,320 @@
|
|
1
|
+
module Sequel
  class Database
    # SQL keyword/fragment constants used when building DDL statements.
    # Note the deliberate leading/trailing spaces on several of them, so
    # they can be appended directly to a column definition string.
    AUTOINCREMENT = 'AUTOINCREMENT'.freeze
    CASCADE = 'CASCADE'.freeze
    COMMA_SEPARATOR = ', '.freeze
    NO_ACTION = 'NO ACTION'.freeze
    NOT_NULL = ' NOT NULL'.freeze
    NULL = ' NULL'.freeze
    PRIMARY_KEY = ' PRIMARY KEY'.freeze
    RESTRICT = 'RESTRICT'.freeze
    SET_DEFAULT = 'SET DEFAULT'.freeze
    SET_NULL = 'SET NULL'.freeze
    TEMPORARY = 'TEMPORARY '.freeze
    UNDERSCORE = '_'.freeze
    UNIQUE = ' UNIQUE'.freeze
    UNSIGNED = ' UNSIGNED'.freeze

    # Default serial primary key options, used when creating tables
    # with an implicit integer primary key.
    def serial_primary_key_options
      {:primary_key => true, :type => Integer, :auto_increment => true}
    end

    private

    # The SQL to execute to modify the DDL for the given table name. op
    # should be one of the operations returned by the AlterTableGenerator.
    def alter_table_sql(table, op)
      quoted_name = quote_identifier(op[:name]) if op[:name]
      alter_table_op = case op[:op]
      when :add_column
        "ADD COLUMN #{column_definition_sql(op)}"
      when :drop_column
        "DROP COLUMN #{quoted_name}"
      when :rename_column
        "RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}"
      when :set_column_type
        "ALTER COLUMN #{quoted_name} TYPE #{type_literal(op)}"
      when :set_column_default
        "ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}"
      when :set_column_null
        "ALTER COLUMN #{quoted_name} #{op[:null] ? 'DROP' : 'SET'} NOT NULL"
      when :add_index
        # Index changes use their own statements, not ALTER TABLE,
        # so return early instead of falling through to the wrapper below.
        return index_definition_sql(table, op)
      when :drop_index
        return drop_index_sql(table, op)
      when :add_constraint
        "ADD #{constraint_definition_sql(op)}"
      when :drop_constraint
        "DROP CONSTRAINT #{quoted_name}"
      else
        raise Error, "Unsupported ALTER TABLE operation"
      end
      "ALTER TABLE #{quote_schema_table(table)} #{alter_table_op}"
    end

    # Array of SQL DDL modification statements for the given table,
    # corresponding to the DDL changes specified by the operations.
    def alter_table_sql_list(table, operations)
      operations.map{|op| alter_table_sql(table, op)}
    end

    # The SQL string specify the autoincrement property, generally used by
    # primary keys.
    def auto_increment_sql
      AUTOINCREMENT
    end

    # SQL DDL fragment containing the column creation SQL for the given column.
    def column_definition_sql(column)
      sql = "#{quote_identifier(column[:name])} #{type_literal(column)}"
      sql << UNIQUE if column[:unique]
      # :null takes precedence over the :allow_null alias when both are given.
      null = column.fetch(:null, column[:allow_null])
      sql << NOT_NULL if null == false
      sql << NULL if null == true
      # include?(:default) rather than [:default], so an explicit nil
      # default produces DEFAULT NULL.
      sql << " DEFAULT #{literal(column[:default])}" if column.include?(:default)
      sql << PRIMARY_KEY if column[:primary_key]
      sql << " #{auto_increment_sql}" if column[:auto_increment]
      sql << column_references_column_constraint_sql(column) if column[:table]
      sql
    end

    # SQL DDL fragment containing the column creation
    # SQL for all given columns, used inside a CREATE TABLE block.
    def column_list_sql(generator)
      (generator.columns.map{|c| column_definition_sql(c)} + generator.constraints.map{|c| constraint_definition_sql(c)}).join(COMMA_SEPARATOR)
    end

    # SQL DDL fragment for column foreign key references (column constraints)
    def column_references_column_constraint_sql(column)
      column_references_sql(column)
    end

    # SQL DDL fragment for column foreign key references
    def column_references_sql(column)
      sql = " REFERENCES #{quote_schema_table(column[:table])}"
      sql << "(#{Array(column[:key]).map{|x| quote_identifier(x)}.join(COMMA_SEPARATOR)})" if column[:key]
      sql << " ON DELETE #{on_delete_clause(column[:on_delete])}" if column[:on_delete]
      # The same clause values (:cascade, :restrict, etc.) are valid for
      # ON UPDATE, so on_delete_clause is intentionally reused here.
      sql << " ON UPDATE #{on_delete_clause(column[:on_update])}" if column[:on_update]
      sql
    end

    # SQL DDL fragment for table foreign key references (table constraints)
    def column_references_table_constraint_sql(constraint)
      "FOREIGN KEY #{literal(constraint[:columns])}#{column_references_sql(constraint)}"
    end

    # SQL DDL fragment specifying a constraint on a table.
    # Raises an Error if constraint[:type] is not one of
    # :check, :primary_key, :foreign_key, or :unique.
    def constraint_definition_sql(constraint)
      sql = constraint[:name] ? "CONSTRAINT #{quote_identifier(constraint[:name])} " : ""
      case constraint[:type]
      when :check
        check = constraint[:check]
        sql << "CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}"
      when :primary_key
        sql << "PRIMARY KEY #{literal(constraint[:columns])}"
      when :foreign_key
        sql << column_references_table_constraint_sql(constraint)
      when :unique
        sql << "UNIQUE #{literal(constraint[:columns])}"
      else
        raise Error, "Invalid constraint type #{constraint[:type]}, should be :check, :primary_key, :foreign_key, or :unique"
      end
      sql
    end

    # DDL statement for creating a table with the given name, columns, and options
    def create_table_sql(name, generator, options)
      "CREATE #{temporary_table_sql if options[:temp]}TABLE #{quote_schema_table(name)} (#{column_list_sql(generator)})"
    end

    # Default index name for the table and columns, may be too long
    # for certain databases.
    def default_index_name(table_name, columns)
      schema, table = schema_and_table(table_name)
      "#{"#{schema}_" if schema and schema != default_schema}#{table}_#{columns.map{|c| [String, Symbol].any?{|cl| c.is_a?(cl)} ? c : literal(c).gsub(/\W/, '_')}.join(UNDERSCORE)}_index"
    end

    # The SQL to drop an index for the table.
    def drop_index_sql(table, op)
      "DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}"
    end

    # SQL DDL statement to drop the table with the given name.
    def drop_table_sql(name)
      "DROP TABLE #{quote_schema_table(name)}"
    end

    # Proxy the filter_expr call to the dataset, used for creating constraints.
    def filter_expr(*args, &block)
      schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))
    end

    # SQL DDL statement for creating an index for the table with the given name
    # and index specifications.  Raises an Error for index features
    # (:type, :where) not supported by the generic implementation.
    def index_definition_sql(table_name, index)
      index_name = index[:name] || default_index_name(table_name, index[:columns])
      if index[:type]
        raise Error, "Index types are not supported for this database"
      elsif index[:where]
        raise Error, "Partial indexes are not supported for this database"
      else
        "CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_schema_table(table_name)} #{literal(index[:columns])}"
      end
    end

    # Array of SQL DDL statements, one for each index specification,
    # for the given table.
    def index_sql_list(table_name, indexes)
      indexes.map{|i| index_definition_sql(table_name, i)}
    end

    # SQL DDL ON DELETE fragment to use, based on the given action.
    # The following actions are recognized:
    #
    # * :cascade - Delete rows referencing this row.
    # * :no_action (default) - Raise an error if other rows reference this
    #   row, allow deferring of the integrity check.
    # * :restrict - Raise an error if other rows reference this row,
    #   but do not allow deferring the integrity check.
    # * :set_default - Set columns referencing this row to their default value.
    # * :set_null - Set columns referencing this row to NULL.
    #
    # Any unrecognized action falls back to NO ACTION.
    def on_delete_clause(action)
      case action
      when :restrict
        RESTRICT
      when :cascade
        CASCADE
      when :set_null
        SET_NULL
      when :set_default
        SET_DEFAULT
      else
        NO_ACTION
      end
    end

    # Proxy the quote_schema_table method to the dataset
    def quote_schema_table(table)
      schema_utility_dataset.quote_schema_table(table)
    end

    # Proxy the quote_identifier method to the dataset, used for quoting tables and columns.
    def quote_identifier(v)
      schema_utility_dataset.quote_identifier(v)
    end

    # SQL DDL statement for renaming a table.
    def rename_table_sql(name, new_name)
      "ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}"
    end

    # SQL DDL fragment for temporary table.  Uses const_get so that
    # subclasses can override the fragment just by redefining TEMPORARY.
    def temporary_table_sql
      self.class.const_get(:TEMPORARY)
    end

    # SQL fragment specifying the type of a given column.  Ruby classes
    # are mapped via the type_literal_generic_* methods; anything else is
    # treated as a database-specific type name.
    def type_literal(column)
      column[:type].is_a?(Class) ? type_literal_generic(column) : type_literal_specific(column)
    end

    # SQL fragment specifying the full type of a column,
    # consider the type with possible modifiers.  Raises an Error if the
    # ruby class has no type_literal_generic_* handler.
    def type_literal_generic(column)
      meth = "type_literal_generic_#{column[:type].name.to_s.downcase}"
      if respond_to?(meth, true)
        send(meth, column)
      else
        raise Error, "Unsupported ruby class used as database type: #{column[:type]}"
      end
    end

    # Alias for type_literal_generic_numeric, to make overriding in a subclass easier.
    def type_literal_generic_bigdecimal(column)
      type_literal_generic_numeric(column)
    end

    # Sequel uses the bigint type by default for Bignums.
    def type_literal_generic_bignum(column)
      :bigint
    end

    # Sequel uses the date type by default for Dates.
    def type_literal_generic_date(column)
      :date
    end

    # Sequel uses the timestamp type by default for DateTimes.
    def type_literal_generic_datetime(column)
      :timestamp
    end

    # Alias for type_literal_generic_trueclass, to make overriding in a subclass easier.
    def type_literal_generic_falseclass(column)
      type_literal_generic_trueclass(column)
    end

    # Sequel uses the blob type by default for Files.
    def type_literal_generic_file(column)
      :blob
    end

    # Alias for type_literal_generic_integer, to make overriding in a subclass easier.
    def type_literal_generic_fixnum(column)
      type_literal_generic_integer(column)
    end

    # Sequel uses the double precision type by default for Floats.
    def type_literal_generic_float(column)
      :"double precision"
    end

    # Sequel uses the integer type by default for integers
    def type_literal_generic_integer(column)
      :integer
    end

    # Sequel uses the numeric type by default for Numerics and BigDecimals.
    # If a size is given, it is used, otherwise, it will default to whatever
    # the database default is for an unsized value.
    def type_literal_generic_numeric(column)
      column[:size] ? "numeric(#{Array(column[:size]).join(', ')})" : :numeric
    end

    # Sequel uses the varchar type by default for Strings. If a
    # size isn't present, Sequel assumes a size of 255. If the
    # :fixed option is used, Sequel uses the char type. If the
    # :text option is used, Sequel uses the :text type.
    def type_literal_generic_string(column)
      if column[:text]
        :text
      elsif column[:fixed]
        "char(#{column[:size]||255})"
      else
        "varchar(#{column[:size]||255})"
      end
    end

    # Sequel uses the timestamp type by default for Time values.
    # If the :only_time option is used, the time type is used.
    def type_literal_generic_time(column)
      column[:only_time] ? :time : :timestamp
    end

    # Sequel uses the boolean type by default for TrueClass and FalseClass.
    def type_literal_generic_trueclass(column)
      :boolean
    end

    # SQL fragment for the given type of a column if the column is not one of the
    # generic types specified with a ruby class.
    def type_literal_specific(column)
      type = column[:type]
      type = "double precision" if type.to_s == 'double'
      column[:size] ||= 255 if type.to_s == 'varchar'
      elements = column[:size] || column[:elements]
      "#{type}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}"
    end
  end
end
|
@@ -0,0 +1,32 @@
|
|
1
|
+
module Sequel
  # A dataset represents an SQL query, or more generally, an abstract
  # set of rows in the database.  Datasets can be used to create,
  # retrieve, update and delete records.
  #
  # Query results are always retrieved on demand, so a dataset can be kept
  # around and reused indefinitely (datasets never cache results):
  #
  #   my_posts = DB[:posts].filter(:author => 'david') # no records are retrieved
  #   my_posts.all # records are retrieved
  #   my_posts.all # records are retrieved again
  #
  # Most dataset methods return modified copies of the dataset (functional style), so you can
  # reuse different datasets to access data:
  #
  #   posts = DB[:posts]
  #   davids_posts = posts.filter(:author => 'david')
  #   old_posts = posts.filter('stamp < ?', Date.today - 7)
  #   davids_old_posts = davids_posts.filter('stamp < ?', Date.today - 7)
  #
  # Datasets are Enumerable objects, so they can be manipulated using any
  # of the Enumerable methods, such as map, inject, etc.
  #
  # For more information, see the {"Dataset Basics" guide}[link:files/doc/dataset_basics_rdoc.html].
  class Dataset
    extend Metaprogramming
    include Metaprogramming
    include Enumerable
  end

  # Sequel's own require helper: loads each listed file from the
  # lib/sequel/dataset/ subdirectory.
  require(%w"query actions features graph prepared_statements misc mutation sql", 'dataset')
end
|
@@ -0,0 +1,441 @@
|
|
1
|
+
module Sequel
|
2
|
+
class Dataset
|
3
|
+
# ---------------------
|
4
|
+
# :section: Methods that execute code on the database
|
5
|
+
# These methods all execute the dataset's SQL on the database.
|
6
|
+
# They don't return modified datasets, so if used in a method chain
|
7
|
+
# they should be the last method called.
|
8
|
+
# ---------------------
|
9
|
+
|
10
|
+
# Forwards to insert.  Defined as a forwarding method rather than a
# direct alias, so subclasses only need to override insert itself.
def <<(*args)
  insert(*args)
end
|
15
|
+
|
16
|
+
# Returns the first record matching the given conditions, e.g.:
#
#   ds[:id=>1] # => {:id=>1}
#
# Raises an Error when called with no arguments or with a single
# Integer (use first/limit for row-count style access instead).
def [](*conditions)
  if conditions.empty? || (conditions.size == 1 && conditions.first.is_a?(Integer))
    raise(Error, ARRAY_ACCESS_ERROR_MSG)
  end
  first(*conditions)
end
|
23
|
+
|
24
|
+
# Update all records matching +conditions+ with the given +values+, e.g.:
#
#   ds[:id=>1] = {:id=>2} # SQL: UPDATE ... SET id = 2 WHERE id = 1
def []=(conditions, values)
  matching = filter(conditions)
  matching.update(values)
end
|
31
|
+
|
32
|
+
# Returns an array containing every record in the dataset.  When a
# block is given, the fully loaded array is iterated over afterwards,
# so additional queries inside the block are safe (unlike with each).
def all(&block)
  records = []
  each{|row| records << row}
  post_load(records)
  records.each(&block) if block
  records
end
|
41
|
+
|
42
|
+
# Returns the average value for the given column.
# The block is a virtual row block, so avg(column) here builds the
# SQL avg() function call rather than recursing into this method.
def avg(column)
  aggregate_dataset.get{avg(column)}
end
|
46
|
+
|
47
|
+
# Returns the columns in the result set in order.
# If the columns are currently cached, returns the cached value. Otherwise,
# a SELECT query is performed to get a single row. Adapters are expected
# to fill the columns cache with the column information when a query is performed.
# If the dataset does not have any rows, this may be an empty array depending on how
# the adapter is programmed.
#
# If you are looking for all columns for a single table and maybe some information about
# each column (e.g. type), see Database#schema.
def columns
  return @columns if @columns
  # Strip filters/order/distinct and fetch at most one row, purely so the
  # adapter populates the column cache on the cloned dataset.
  ds = unfiltered.unordered.clone(:distinct => nil, :limit => 1)
  ds.each{break}
  @columns = ds.instance_variable_get(:@columns)
  @columns || []
end
|
63
|
+
|
64
|
+
# Discards the cached column list and issues a fresh SELECT query
# to determine the columns.
def columns!
  @columns = nil
  columns
end
|
70
|
+
|
71
|
+
# Returns the number of records in the dataset.
# COUNT(:*){} is virtual row syntax: the empty block marks COUNT as an
# SQL function call on * instead of a column reference.
def count
  aggregate_dataset.get{COUNT(:*){}.as(count)}.to_i
end
|
75
|
+
|
76
|
+
# Deletes the records in the dataset.  The return value is whatever
# execute_dui yields — generally the number of deleted rows, though
# that is adapter dependent.  See delete_sql.
def delete
  execute_dui(delete_sql)
end
|
81
|
+
|
82
|
+
# Iterates over the records in the dataset as they are yielded from the
# database adapter, and returns self.
#
# Note that this method is not safe to use on many adapters if you are
# running additional queries inside the provided block. If you are
# running queries inside the block, you should use all instead of each.
def each(&block)
  if @opts[:graph]
    graph_each(&block)
  elsif row_proc = @row_proc
    # Wrap each raw row through the row proc (e.g. model instantiation).
    fetch_rows(select_sql){|r| yield row_proc.call(r)}
  else
    fetch_rows(select_sql, &block)
  end
  self
end
|
98
|
+
|
99
|
+
# Returns true when the dataset contains no records, false otherwise.
# Selecting the literal 1 avoids retrieving any real column data.
def empty?
  get(1).nil?
end
|
103
|
+
|
104
|
+
# Executes a select query and fetches records, passing each record to
# the supplied block as a hash with symbol keys.  This is part of the
# adapter interface; the base implementation always raises.
def fetch_rows(sql, &block)
  raise NotImplementedError, NOTIMPL_MSG
end
|
109
|
+
|
110
|
+
# If an integer argument is given, it is interpreted as a limit, and
# all matching records up to that limit are returned.  If no argument
# is passed, the first matching record is returned.  Any other type of
# argument(s) is given to filter, and the first matching record is
# returned.  A block, when given, filters the dataset before anything
# is returned.  Examples:
#
#   ds.first => {:id=>7}
#   ds.first(2) => [{:id=>6}, {:id=>4}]
#   ds.order(:id).first(2) => [{:id=>1}, {:id=>2}]
#   ds.first(:id=>2) => {:id=>2}
#   ds.first("id = 3") => {:id=>3}
#   ds.first("id = ?", 4) => {:id=>4}
#   ds.first{|o| o.id > 2} => {:id=>5}
#   ds.order(:id).first{|o| o.id > 2} => {:id=>3}
#   ds.first("id > ?", 4){|o| o.id < 6} => {:id=>5}
#   ds.order(:id).first(2){|o| o.id < 2} => [{:id=>1}]
def first(*args, &block)
  ds = block ? filter(&block) : self
  return ds.single_record if args.empty?
  arg = args.size == 1 ? args.first : args
  if arg.is_a?(Integer)
    ds.limit(arg).all
  else
    ds.filter(arg).single_record
  end
end
|
143
|
+
|
144
|
+
# Return the column value for the first matching record in the dataset.
# Raises an error if both an argument and block is given.
#
#   ds.get(:id)
#   ds.get{|o| o.sum(:id)}
def get(column=nil, &block)
  # Block form: the block selects the expression to fetch.
  return select(&block).single_value unless column
  raise(Error, ARG_BLOCK_ERROR_MSG) if block
  select(column).single_value
end
|
157
|
+
|
158
|
+
# Inserts multiple records into the associated table. This method can be
# used to efficiently insert a large number of records into a table. Inserts
# are automatically wrapped in a transaction.
#
# This method is called with a columns array and an array of value arrays:
#
#   dataset.import([:x, :y], [[1, 2], [3, 4]])
#
# This method also accepts a dataset instead of an array of value arrays:
#
#   dataset.import([:x, :y], other_dataset.select(:a___x, :b___y))
#
# The method also accepts a :slice or :commit_every option that specifies
# the number of records to insert per transaction. This is useful especially
# when inserting a large number of records, e.g.:
#
#   # this will commit every 50 records
#   dataset.import([:x, :y], [[1, 2], [3, 4], ...], :slice => 50)
def import(columns, values, opts={})
  # A dataset source becomes a single insert based on that dataset.
  return @db.transaction{insert(columns, values)} if values.is_a?(Dataset)

  return if values.empty?
  raise(Error, IMPORT_ERROR_MSG) if columns.empty?

  slice_size = opts[:commit_every] || opts[:slice]
  if slice_size
    # One transaction per slice of rows; values is non-empty here, so the
    # loop body runs at least once.
    offset = 0
    while offset < values.length
      @db.transaction(opts){multi_insert_sql(columns, values[offset, slice_size]).each{|st| execute_dui(st)}}
      offset += slice_size
    end
  else
    # Single transaction around all of the generated insert statements.
    @db.transaction{multi_insert_sql(columns, values).each{|st| execute_dui(st)}}
  end
end
|
194
|
+
|
195
|
+
# Inserts values into the associated table. The returned value is generally
# the value of the primary key for the inserted row, but that is adapter dependent.
# See insert_sql for the accepted argument forms.
def insert(*values)
  execute_insert(insert_sql(*values))
end
|
201
|
+
|
202
|
+
# Inserts multiple values. If a block is given it is invoked for each
# item in the given array before inserting it. See #multi_insert as
# a possible faster version that inserts multiple records in one
# SQL statement.
def insert_multiple(array, &block)
  # Transform each item through the block (when given) before inserting.
  array.each{|i| insert(block ? block[i] : i)}
end
|
213
|
+
|
214
|
+
# Returns the interval between minimum and maximum values for the given
# column (MAX(column) - MIN(column)), computed with a single aggregate query.
def interval(column)
  aggregate_dataset.get{max(column) - min(column)}
end
|
219
|
+
|
220
|
+
# Reverses the order and then runs first. Note that this
# will not necessarily give you the last record in the dataset,
# unless you have an unambiguous order. If there is not
# currently an order for this dataset, raises an Error, since
# reversing an unordered dataset would be meaningless.
def last(*args, &block)
  raise(Error, 'No order specified') unless @opts[:order]
  reverse.first(*args, &block)
end
|
228
|
+
|
229
|
+
# Maps column values for each record in the dataset (if a column name is
# given), or performs the stock mapping functionality of Enumerable.
# Raises an error if both an argument and block are given. Examples:
#
#   ds.map(:id) => [1, 2, 3, ...]
#   ds.map{|r| r[:id] * 2} => [2, 4, 6, ...]
def map(column=nil, &block)
  if column.nil?
    super(&block)
  else
    raise(Error, ARG_BLOCK_ERROR_MSG) if block
    # Extract the named column from each record hash.
    super(){|r| r[column]}
  end
end
|
243
|
+
|
244
|
+
# Returns the maximum value for the given column, using an aggregate query.
def max(column)
  aggregate_dataset.get{max(column)}
end
|
248
|
+
|
249
|
+
# Returns the minimum value for the given column, using an aggregate query.
def min(column)
  aggregate_dataset.get{min(column)}
end
|
253
|
+
|
254
|
+
# This is a front end for import that allows you to submit an array of
# hashes instead of arrays of columns and values:
#
#   dataset.multi_insert([{:x => 1}, {:x => 2}])
#
# Be aware that all hashes should have the same keys if you use this calling method,
# otherwise some columns could be missed or set to null instead of to default
# values.
#
# You can also use the :slice or :commit_every option that import accepts.
def multi_insert(hashes, opts={})
  return if hashes.empty?
  # The first hash's keys determine the column list for every row.
  keys = hashes.first.keys
  rows = hashes.map{|h| keys.map{|k| h[k]}}
  import(keys, rows, opts)
end
|
269
|
+
|
270
|
+
# Returns a Range object made from the minimum and maximum values for the
# given column, computed in a single aggregate query. Returns nil if the
# query yields no row.
def range(column)
  if r = aggregate_dataset.select{[min(column).as(v1), max(column).as(v2)]}.first
    (r[:v1]..r[:v2])
  end
end
|
277
|
+
|
278
|
+
# Returns a hash with key_column values as keys and value_column values as
# values. Similar to to_hash, but only selects the two columns.
# Both arguments must be symbols (hash_key_symbol raises otherwise);
# qualified/aliased symbols are reduced to their likely hash key.
def select_hash(key_column, value_column)
  select(key_column, value_column).to_hash(hash_key_symbol(key_column), hash_key_symbol(value_column))
end
|
283
|
+
|
284
|
+
# Selects the column given (either as an argument or as a block), and
# returns an array of all values of that column in the dataset. If you
# give a block argument that returns an array with multiple entries,
# the contents of the resulting array are undefined.
def select_map(column=nil, &block)
  # Work on a plain, ungraphed copy so rows come back as raw hashes.
  ds = naked.ungraphed
  if column
    raise(Error, ARG_BLOCK_ERROR_MSG) if block
    ds = ds.select(column)
  else
    ds = ds.select(&block)
  end
  ds.map{|r| r.values.first}
end
|
298
|
+
|
299
|
+
# The same as select_map, but in addition orders the array by the column.
def select_order_map(column=nil, &block)
  # Work on a plain, ungraphed copy so rows come back as raw hashes.
  ds = naked.ungraphed
  if column
    raise(Error, ARG_BLOCK_ERROR_MSG) if block
    # Order by the unaliased expression so any alias only affects selection.
    ds = ds.select(column).order(unaliased_identifier(column))
  else
    ds = ds.select(&block).order(&block)
  end
  ds.map{|r| r.values.first}
end
|
310
|
+
|
311
|
+
# Alias for update, but not aliased directly so subclasses
# don't have to override both methods.
def set(*args)
  update(*args)
end
|
316
|
+
|
317
|
+
# Returns the first record in the dataset, or nil if the dataset has no
# records. Forces a LIMIT of 1 and returns from the method as soon as
# the first row is yielded.
def single_record
  clone(:limit=>1).each{|r| return r}
  nil
end
|
322
|
+
|
323
|
+
# Returns the first value of the first record in the dataset.
# Returns nil if dataset is empty.
def single_value
  # Use a plain, ungraphed copy so the record is a raw hash.
  row = naked.ungraphed.single_record
  row.values.first if row
end
|
330
|
+
|
331
|
+
# Returns the sum for the given column, using an aggregate query.
def sum(column)
  aggregate_dataset.get{sum(column)}
end
|
335
|
+
|
336
|
+
# Returns a string in CSV format containing the dataset records. By
# default the CSV representation includes the column titles in the
# first line. You can turn that off by passing false as the
# include_column_titles argument.
#
# This does not use a CSV library or handle quoting of values in
# any way. If any values in any of the rows could include commas or line
# endings, you shouldn't use this.
def to_csv(include_column_titles = true)
  ds = naked
  names = ds.columns
  out = ''
  out << "#{names.join(COMMA_SEPARATOR)}\r\n" if include_column_titles
  # One CRLF-terminated line per record, columns in declaration order.
  ds.each{|row| out << "#{names.map{|c| row[c]}.join(COMMA_SEPARATOR)}\r\n"}
  out
end
|
352
|
+
|
353
|
+
# Returns a hash with one column used as key and another used as value.
# If rows have duplicate values for the key column, the latter row(s)
# will overwrite the value of the previous row(s). If the value_column
# is not given or nil, uses the entire hash as the value.
def to_hash(key_column, value_column = nil)
  result = {}
  each{|r| result[r[key_column]] = value_column ? r[value_column] : r}
  result
end
|
363
|
+
|
364
|
+
# Truncates the dataset's table. Returns nil (execute_ddl always
# returns nil).
def truncate
  execute_ddl(truncate_sql)
end
|
368
|
+
|
369
|
+
# Updates values for the dataset. The returned value is generally the
# number of rows updated, but that is adapter dependent. See update_sql.
def update(values={})
  execute_dui(update_sql(values))
end
|
374
|
+
|
375
|
+
private
|
376
|
+
|
377
|
+
# Set the server to use to :default unless it is already set in the passed
# opts. The passed opts take precedence over the dataset's own :server
# option because of the merge order.
def default_server_opts(opts)
  {:server=>@opts[:server] || :default}.merge(opts)
end
|
381
|
+
|
382
|
+
# Execute the given SQL on the database using execute. Defaults to the
# :read_only server when the dataset does not specify one.
def execute(sql, opts={}, &block)
  server = @opts[:server] || :read_only
  @db.execute(sql, {:server=>server}.merge(opts), &block)
end
|
386
|
+
|
387
|
+
# Execute the given SQL on the database using execute_ddl.
# Always returns nil.
def execute_ddl(sql, opts={}, &block)
  @db.execute_ddl(sql, default_server_opts(opts), &block)
  nil
end
|
392
|
+
|
393
|
+
# Execute the given SQL on the database using execute_dui
# (delete/update/insert), routed to this dataset's default server.
def execute_dui(sql, opts={}, &block)
  @db.execute_dui(sql, default_server_opts(opts), &block)
end
|
397
|
+
|
398
|
+
# Execute the given SQL on the database using execute_insert,
# routed to this dataset's default server.
def execute_insert(sql, opts={}, &block)
  @db.execute_insert(sql, default_server_opts(opts), &block)
end
|
402
|
+
|
403
|
+
# Return a plain symbol given a potentially qualified or aliased symbol,
# specifying the symbol that is likely to be used as the hash key
# for the column when records are returned. Raises an Error for
# non-symbol input.
def hash_key_symbol(s)
  raise(Error, "#{s.inspect} is not a symbol") unless s.is_a?(Symbol)
  # Prefer the alias part when present, otherwise the column part.
  _, column, aliaz = split_symbol(s)
  (aliaz || column).to_sym
end
|
411
|
+
|
412
|
+
# Modify the identifier returned from the database based on the
# identifier_output_method. Empty identifiers become :untitled.
def output_identifier(v)
  v = 'untitled' if v == ''
  meth = identifier_output_method
  meth ? v.to_s.send(meth).to_sym : v.to_sym
end
|
418
|
+
|
419
|
+
# Hook method. This is run inside .all, after all of the records have been
# loaded via .each, but before any block passed to all is called. It is
# called with a single argument, an array of all returned records. Does
# nothing by default, added to make the model eager loading code simpler.
def post_load(all_records)
end
|
425
|
+
|
426
|
+
# Return the unaliased part of the identifier. Handles both
# implicit aliases in symbols (via split_symbol), as well as
# SQL::AliasedExpression objects. Other objects are returned as is.
def unaliased_identifier(c)
  case c
  when Symbol
    # Drop any alias part; re-qualify with the table when one was embedded.
    c_table, column, _ = split_symbol(c)
    c_table ? column.to_sym.qualify(c_table) : column.to_sym
  when SQL::AliasedExpression
    c.expression
  else
    c
  end
end
|
440
|
+
end
|
441
|
+
end
|