epugh-sequel 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134) hide show
  1. data/README.rdoc +652 -0
  2. data/VERSION.yml +4 -0
  3. data/bin/sequel +104 -0
  4. data/lib/sequel.rb +1 -0
  5. data/lib/sequel/adapters/ado.rb +85 -0
  6. data/lib/sequel/adapters/db2.rb +132 -0
  7. data/lib/sequel/adapters/dbi.rb +101 -0
  8. data/lib/sequel/adapters/do.rb +197 -0
  9. data/lib/sequel/adapters/do/mysql.rb +38 -0
  10. data/lib/sequel/adapters/do/postgres.rb +92 -0
  11. data/lib/sequel/adapters/do/sqlite.rb +31 -0
  12. data/lib/sequel/adapters/firebird.rb +307 -0
  13. data/lib/sequel/adapters/informix.rb +75 -0
  14. data/lib/sequel/adapters/jdbc.rb +485 -0
  15. data/lib/sequel/adapters/jdbc/h2.rb +62 -0
  16. data/lib/sequel/adapters/jdbc/mysql.rb +56 -0
  17. data/lib/sequel/adapters/jdbc/oracle.rb +23 -0
  18. data/lib/sequel/adapters/jdbc/postgresql.rb +101 -0
  19. data/lib/sequel/adapters/jdbc/sqlite.rb +43 -0
  20. data/lib/sequel/adapters/mysql.rb +370 -0
  21. data/lib/sequel/adapters/odbc.rb +184 -0
  22. data/lib/sequel/adapters/openbase.rb +57 -0
  23. data/lib/sequel/adapters/oracle.rb +140 -0
  24. data/lib/sequel/adapters/postgres.rb +453 -0
  25. data/lib/sequel/adapters/shared/mssql.rb +93 -0
  26. data/lib/sequel/adapters/shared/mysql.rb +341 -0
  27. data/lib/sequel/adapters/shared/oracle.rb +62 -0
  28. data/lib/sequel/adapters/shared/postgres.rb +743 -0
  29. data/lib/sequel/adapters/shared/progress.rb +34 -0
  30. data/lib/sequel/adapters/shared/sqlite.rb +263 -0
  31. data/lib/sequel/adapters/sqlite.rb +243 -0
  32. data/lib/sequel/adapters/utils/date_format.rb +21 -0
  33. data/lib/sequel/adapters/utils/stored_procedures.rb +75 -0
  34. data/lib/sequel/adapters/utils/unsupported.rb +62 -0
  35. data/lib/sequel/connection_pool.rb +258 -0
  36. data/lib/sequel/core.rb +204 -0
  37. data/lib/sequel/core_sql.rb +185 -0
  38. data/lib/sequel/database.rb +687 -0
  39. data/lib/sequel/database/schema_generator.rb +324 -0
  40. data/lib/sequel/database/schema_methods.rb +164 -0
  41. data/lib/sequel/database/schema_sql.rb +324 -0
  42. data/lib/sequel/dataset.rb +422 -0
  43. data/lib/sequel/dataset/convenience.rb +237 -0
  44. data/lib/sequel/dataset/prepared_statements.rb +220 -0
  45. data/lib/sequel/dataset/sql.rb +1105 -0
  46. data/lib/sequel/deprecated.rb +529 -0
  47. data/lib/sequel/exceptions.rb +44 -0
  48. data/lib/sequel/extensions/blank.rb +42 -0
  49. data/lib/sequel/extensions/inflector.rb +288 -0
  50. data/lib/sequel/extensions/pagination.rb +96 -0
  51. data/lib/sequel/extensions/pretty_table.rb +78 -0
  52. data/lib/sequel/extensions/query.rb +48 -0
  53. data/lib/sequel/extensions/string_date_time.rb +47 -0
  54. data/lib/sequel/metaprogramming.rb +44 -0
  55. data/lib/sequel/migration.rb +212 -0
  56. data/lib/sequel/model.rb +142 -0
  57. data/lib/sequel/model/association_reflection.rb +263 -0
  58. data/lib/sequel/model/associations.rb +1024 -0
  59. data/lib/sequel/model/base.rb +911 -0
  60. data/lib/sequel/model/deprecated.rb +188 -0
  61. data/lib/sequel/model/deprecated_hooks.rb +103 -0
  62. data/lib/sequel/model/deprecated_inflector.rb +335 -0
  63. data/lib/sequel/model/deprecated_validations.rb +384 -0
  64. data/lib/sequel/model/errors.rb +37 -0
  65. data/lib/sequel/model/exceptions.rb +7 -0
  66. data/lib/sequel/model/inflections.rb +230 -0
  67. data/lib/sequel/model/plugins.rb +74 -0
  68. data/lib/sequel/object_graph.rb +230 -0
  69. data/lib/sequel/plugins/caching.rb +122 -0
  70. data/lib/sequel/plugins/hook_class_methods.rb +122 -0
  71. data/lib/sequel/plugins/schema.rb +53 -0
  72. data/lib/sequel/plugins/single_table_inheritance.rb +63 -0
  73. data/lib/sequel/plugins/validation_class_methods.rb +373 -0
  74. data/lib/sequel/sql.rb +854 -0
  75. data/lib/sequel/version.rb +11 -0
  76. data/lib/sequel_core.rb +1 -0
  77. data/lib/sequel_model.rb +1 -0
  78. data/spec/adapters/ado_spec.rb +46 -0
  79. data/spec/adapters/firebird_spec.rb +376 -0
  80. data/spec/adapters/informix_spec.rb +96 -0
  81. data/spec/adapters/mysql_spec.rb +875 -0
  82. data/spec/adapters/oracle_spec.rb +272 -0
  83. data/spec/adapters/postgres_spec.rb +692 -0
  84. data/spec/adapters/spec_helper.rb +10 -0
  85. data/spec/adapters/sqlite_spec.rb +550 -0
  86. data/spec/core/connection_pool_spec.rb +526 -0
  87. data/spec/core/core_ext_spec.rb +156 -0
  88. data/spec/core/core_sql_spec.rb +528 -0
  89. data/spec/core/database_spec.rb +1214 -0
  90. data/spec/core/dataset_spec.rb +3513 -0
  91. data/spec/core/expression_filters_spec.rb +363 -0
  92. data/spec/core/migration_spec.rb +261 -0
  93. data/spec/core/object_graph_spec.rb +280 -0
  94. data/spec/core/pretty_table_spec.rb +58 -0
  95. data/spec/core/schema_generator_spec.rb +167 -0
  96. data/spec/core/schema_spec.rb +778 -0
  97. data/spec/core/spec_helper.rb +82 -0
  98. data/spec/core/version_spec.rb +7 -0
  99. data/spec/extensions/blank_spec.rb +67 -0
  100. data/spec/extensions/caching_spec.rb +201 -0
  101. data/spec/extensions/hook_class_methods_spec.rb +470 -0
  102. data/spec/extensions/inflector_spec.rb +122 -0
  103. data/spec/extensions/pagination_spec.rb +99 -0
  104. data/spec/extensions/pretty_table_spec.rb +91 -0
  105. data/spec/extensions/query_spec.rb +85 -0
  106. data/spec/extensions/schema_spec.rb +111 -0
  107. data/spec/extensions/single_table_inheritance_spec.rb +53 -0
  108. data/spec/extensions/spec_helper.rb +90 -0
  109. data/spec/extensions/string_date_time_spec.rb +93 -0
  110. data/spec/extensions/validation_class_methods_spec.rb +1054 -0
  111. data/spec/integration/dataset_test.rb +160 -0
  112. data/spec/integration/eager_loader_test.rb +683 -0
  113. data/spec/integration/prepared_statement_test.rb +130 -0
  114. data/spec/integration/schema_test.rb +183 -0
  115. data/spec/integration/spec_helper.rb +75 -0
  116. data/spec/integration/type_test.rb +96 -0
  117. data/spec/model/association_reflection_spec.rb +93 -0
  118. data/spec/model/associations_spec.rb +1780 -0
  119. data/spec/model/base_spec.rb +494 -0
  120. data/spec/model/caching_spec.rb +217 -0
  121. data/spec/model/dataset_methods_spec.rb +78 -0
  122. data/spec/model/eager_loading_spec.rb +1165 -0
  123. data/spec/model/hooks_spec.rb +472 -0
  124. data/spec/model/inflector_spec.rb +126 -0
  125. data/spec/model/model_spec.rb +588 -0
  126. data/spec/model/plugins_spec.rb +142 -0
  127. data/spec/model/record_spec.rb +1243 -0
  128. data/spec/model/schema_spec.rb +92 -0
  129. data/spec/model/spec_helper.rb +124 -0
  130. data/spec/model/validations_spec.rb +1080 -0
  131. data/spec/rcov.opts +6 -0
  132. data/spec/spec.opts +0 -0
  133. data/spec/spec_config.rb.example +10 -0
  134. metadata +202 -0
@@ -0,0 +1,324 @@
1
module Sequel
  class Database
    # Frozen SQL fragment constants used when building DDL statements.
    AUTOINCREMENT = 'AUTOINCREMENT'.freeze
    CASCADE = 'CASCADE'.freeze
    COMMA_SEPARATOR = ', '.freeze
    NO_ACTION = 'NO ACTION'.freeze
    NOT_NULL = ' NOT NULL'.freeze
    NULL = ' NULL'.freeze
    PRIMARY_KEY = ' PRIMARY KEY'.freeze
    RESTRICT = 'RESTRICT'.freeze
    SET_DEFAULT = 'SET DEFAULT'.freeze
    SET_NULL = 'SET NULL'.freeze
    # Maps ruby classes (and the :double symbol) to database type names.
    # The default block returns unrecognized keys unchanged, so custom
    # type strings/symbols pass through as-is.
    TYPES = Hash.new {|h, k| k}
    TYPES.merge!(:double=>'double precision', String=>'varchar(255)',
      Integer=>'integer', Fixnum=>'integer', Bignum=>'bigint',
      Float=>'double precision', BigDecimal=>'numeric', Numeric=>'numeric',
      Date=>'date', DateTime=>'timestamp', Time=>'timestamp', File=>'blob',
      TrueClass=>'boolean', FalseClass=>'boolean')
    UNDERSCORE = '_'.freeze
    UNIQUE = ' UNIQUE'.freeze
    UNSIGNED = ' UNSIGNED'.freeze
22
+
23
# The SQL to execute to modify the DDL for the given table name. op
# should be one of the operations returned by the AlterTableGenerator.
def alter_table_sql(table, op)
  # Pre-quote the operation's target (column/constraint) name, if any.
  quoted_name = quote_identifier(op[:name]) if op[:name]
  alter_table_op = case op[:op]
  when :add_column
    "ADD COLUMN #{column_definition_sql(op)}"
  when :drop_column
    "DROP COLUMN #{quoted_name}"
  when :rename_column
    "RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}"
  when :set_column_type
    "ALTER COLUMN #{quoted_name} TYPE #{type_literal(op)}"
  when :set_column_default
    "ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}"
  when :set_column_null
    # DROP NOT NULL when allowing NULLs, SET NOT NULL when disallowing.
    "ALTER COLUMN #{quoted_name} #{op[:null] ? 'DROP' : 'SET'} NOT NULL"
  when :add_index
    # Index operations use CREATE/DROP INDEX statements rather than
    # ALTER TABLE, so return the complete statement directly.
    return index_definition_sql(table, op)
  when :drop_index
    return drop_index_sql(table, op)
  when :add_constraint
    "ADD #{constraint_definition_sql(op)}"
  when :drop_constraint
    "DROP CONSTRAINT #{quoted_name}"
  else
    raise Error, "Unsupported ALTER TABLE operation"
  end
  "ALTER TABLE #{quote_schema_table(table)} #{alter_table_op}"
end
53
+
54
# Array of SQL DDL modification statements for the given table,
# one statement per AlterTableGenerator operation.
def alter_table_sql_list(table, operations)
  operations.collect{|operation| alter_table_sql(table, operation)}
end
59
+
60
# The SQL fragment specifying the autoincrement property, generally
# appended to primary key column definitions.
def auto_increment_sql
  AUTOINCREMENT
end
65
+
66
# SQL DDL fragment containing the column creation SQL for the given column.
#
# column is a hash with at least :name and :type; recognized options
# include :unique, :null, :default, :primary_key, :auto_increment, and
# :table (which triggers a foreign key REFERENCES clause).
def column_definition_sql(column)
  # CHECK constraints come through the same generator path; delegate them.
  return constraint_definition_sql(column) if column[:type] == :check
  sql = "#{quote_identifier(column[:name])} #{type_literal(column)}"
  sql << UNIQUE if column[:unique]
  # == false / == true so only explicit settings emit NULL/NOT NULL.
  sql << NOT_NULL if column[:null] == false
  sql << NULL if column[:null] == true
  # include? rather than [] so an explicit nil default emits DEFAULT NULL.
  sql << " DEFAULT #{literal(column[:default])}" if column.include?(:default)
  sql << PRIMARY_KEY if column[:primary_key]
  sql << " #{auto_increment_sql}" if column[:auto_increment]
  sql << column_references_sql(column) if column[:table]
  sql
end
79
+
80
# SQL DDL fragment with the definitions of all given columns joined
# by commas, used inside a CREATE TABLE statement.
def column_list_sql(columns)
  columns.collect{|col| column_definition_sql(col)}.join(COMMA_SEPARATOR)
end
85
+
86
# SQL DDL fragment for a foreign key REFERENCES clause, including the
# optional referenced column list and ON DELETE/ON UPDATE actions.
def column_references_sql(column)
  parts = [" REFERENCES #{quote_schema_table(column[:table])}"]
  key = column[:key]
  if key
    quoted_keys = Array(key).map{|x| quote_identifier(x)}
    parts << "(#{quoted_keys.join(COMMA_SEPARATOR)})"
  end
  # on_delete_clause maps action symbols to SQL for both clauses.
  parts << " ON DELETE #{on_delete_clause(column[:on_delete])}" if column[:on_delete]
  parts << " ON UPDATE #{on_delete_clause(column[:on_update])}" if column[:on_update]
  parts.join('')
end
94
+
95
# SQL DDL fragment specifying a constraint on a table.
#
# constraint[:constraint_type] selects the flavor (:primary_key,
# :foreign_key, :unique; anything else is treated as a CHECK constraint).
# :name, if given, emits a leading CONSTRAINT name prefix.
def constraint_definition_sql(constraint)
  sql = constraint[:name] ? "CONSTRAINT #{quote_identifier(constraint[:name])} " : ""
  case constraint[:constraint_type]
  when :primary_key
    sql << "PRIMARY KEY #{literal(constraint[:columns])}"
  when :foreign_key
    sql << "FOREIGN KEY #{literal(constraint[:columns])}"
    sql << column_references_sql(constraint)
  when :unique
    sql << "UNIQUE #{literal(constraint[:columns])}"
  else
    check = constraint[:check]
    # Unwrap a single-element array so it isn't literalized as a list.
    sql << "CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}"
  end
  sql
end
112
+
113
# Array of SQL DDL statements: the CREATE TABLE statement for the given
# name and column specifications, followed by CREATE INDEX statements
# for any index specifications.
def create_table_sql_list(name, columns, indexes = nil, options = {})
  statements = ["CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})"]
  if indexes && !indexes.empty?
    statements.concat(index_list_sql_list(name, indexes))
  end
  statements
end
121
+
122
# Default index name for the table and columns, which may be too long
# for certain databases. Includes the schema prefix only when it differs
# from the default schema.
def default_index_name(table_name, columns)
  schema, table = schema_and_table(table_name)
  prefix = (schema && schema != default_schema) ? "#{schema}_" : ""
  # Plain string/symbol columns are used as-is; anything else (e.g. an
  # expression) is literalized and squashed to identifier-safe characters.
  column_part = columns.map do |c|
    [String, Symbol].any?{|cl| c.is_a?(cl)} ? c : literal(c).gsub(/\W/, '_')
  end.join(UNDERSCORE)
  "#{prefix}#{table}_#{column_part}_index"
end
128
+
129
# The SQL statement to drop an index for the table. Uses op[:name] when
# given, otherwise derives the default index name from the columns.
def drop_index_sql(table, op)
  index_name = op[:name] || default_index_name(table, op[:columns])
  "DROP INDEX #{quote_identifier(index_name)}"
end
133
+
134
# SQL DDL statement to drop the table with the given name.
def drop_table_sql(name)
  quoted = quote_schema_table(name)
  "DROP TABLE #{quoted}"
end
138
+
139
# Proxy the filter_expr call to the schema utility dataset and literalize
# the result; used when building CHECK constraints.
# filter_expr is private on the dataset, hence the send.
def filter_expr(*args, &block)
  schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))
end
143
+
144
# SQL DDL statement for creating an index on the table with the given
# name and index specification. Raises Error for features (:type, :where)
# that this generic implementation does not support.
def index_definition_sql(table_name, index)
  # Resolve the name first (matches original evaluation order).
  index_name = index[:name] || default_index_name(table_name, index[:columns])
  raise Error, "Index types are not supported for this database" if index[:type]
  raise Error, "Partial indexes are not supported for this database" if index[:where]
  unique_part = index[:unique] ? 'UNIQUE ' : ''
  "CREATE #{unique_part}INDEX #{quote_identifier(index_name)} ON #{quote_identifier(table_name)} #{literal(index[:columns])}"
end
156
+
157
# Array of CREATE INDEX statements, one per index specification,
# for the given table.
def index_list_sql_list(table_name, indexes)
  indexes.collect{|spec| index_definition_sql(table_name, spec)}
end
162
+
163
# Proxy the literal call to the schema utility dataset; used for
# literalizing default values and column lists in DDL.
def literal(v)
  schema_utility_dataset.literal(v)
end
167
+
168
# SQL DDL ON DELETE (and ON UPDATE) fragment to use, based on the given
# action. The following actions are recognized:
#
# * :cascade - Delete rows referencing this row.
# * :no_action (default) - Raise an error if other rows reference this
#   row, allow deferring of the integrity check.
# * :restrict - Raise an error if other rows reference this row,
#   but do not allow deferring the integrity check.
# * :set_default - Set columns referencing this row to their default value.
# * :set_null - Set columns referencing this row to NULL.
def on_delete_clause(action)
  if action == :restrict
    RESTRICT
  elsif action == :cascade
    CASCADE
  elsif action == :set_null
    SET_NULL
  elsif action == :set_default
    SET_DEFAULT
  else
    # Any unrecognized action (including nil) falls back to NO ACTION.
    NO_ACTION
  end
end
192
+
193
# Proxy the quote_schema_table method to the schema utility dataset;
# quotes a possibly schema-qualified table name.
def quote_schema_table(table)
  schema_utility_dataset.quote_schema_table(table)
end
197
+
198
# Proxy the quote_identifier method to the schema utility dataset,
# used for quoting table and column names.
def quote_identifier(v)
  schema_utility_dataset.quote_identifier(v)
end
202
+
203
# SQL DDL statement for renaming a table.
def rename_table_sql(name, new_name)
  old_quoted = quote_schema_table(name)
  new_quoted = quote_schema_table(new_name)
  "ALTER TABLE #{old_quoted} RENAME TO #{new_quoted}"
end
207
+
208
# Parse the schema from the database.
# If the table_name is not given, returns the schema for all tables as a hash.
# If the table_name is given, returns the schema for a single table as an
# array with all members being arrays of length 2. Available options are:
#
# * :reload - Get fresh information from the database, instead of using
#   cached information. If table_name is blank, :reload should be used
#   unless you are sure that schema has not been called before with a
#   table_name, otherwise you may only be getting the schemas for tables
#   that have been requested explicitly.
# * :schema - An explicit schema to use. It may also be implicitly provided
#   via the table name.
def schema(table = nil, opts={})
  # Calling without a table is deprecated; adapters must implement the
  # private schema_parse_table hook for schema parsing to work at all.
  Deprecation.deprecate('Calling Database#schema without a table argument', 'Use database.tables.inject({}){|h, m| h[m] = database.schema(m); h}') unless table
  raise(Error, 'schema parsing is not implemented on this database') unless respond_to?(:schema_parse_table, true)

  if table
    sch, table_name = schema_and_table(table)
    quoted_name = quote_schema_table(table)
  end
  # A schema embedded in the table name wins unless opts already has one.
  opts = opts.merge(:schema=>sch) if sch && !opts.include?(:schema)
  # :reload invalidates either the one cached table or the whole cache.
  if opts[:reload] && @schemas
    if table_name
      @schemas.delete(quoted_name)
    else
      @schemas = nil
    end
  end

  # Serve from the cache when possible.
  if @schemas
    if table_name
      return @schemas[quoted_name] if @schemas[quoted_name]
    else
      return @schemas
    end
  end

  raise(Error, '#tables does not exist, you must provide a specific table to #schema') if table.nil? && !respond_to?(:tables, true)

  # Cache keyed by quoted schema-qualified name; the default block lets
  # lookups by unquoted name fall through to the quoted key.
  @schemas ||= Hash.new do |h,k|
    quote_name = quote_schema_table(k)
    h[quote_name] if h.include?(quote_name)
  end

  if table_name
    cols = schema_parse_table(table_name, opts)
    raise(Error, 'schema parsing returned no columns, table probably doesn\'t exist') if cols.nil? || cols.empty?
    @schemas[quoted_name] = cols
  else
    # Deprecated whole-database path: parse every table's schema.
    tables.each{|t| @schemas[quote_schema_table(t)] = schema_parse_table(t.to_s, opts)}
    @schemas
  end
end
261
+
262
# The dataset used for proxying certain schema methods (quoting,
# literalizing). Memoized; cleared via reset_schema_utility_dataset.
def schema_utility_dataset
  @schema_utility_dataset ||= dataset
end
266
+
267
private

# Remove the cached schema entry for the given table name, if any.
def remove_cached_schema(table)
  @schemas.delete(quote_schema_table(table)) if @schemas
end
273
+
274
# Remove the cached schema_utility_dataset, because the identifier
# quoting has changed; it will be lazily recreated on next use.
def reset_schema_utility_dataset
  @schema_utility_dataset = nil
end
279
+
280
# Match the database's column type string to a ruby type symbol via
# regular expressions. Supported symbols include :integer, :string,
# :date, :datetime, :time, :boolean, :float, :decimal, :interval and
# :blob; returns nil for unrecognized types.
def schema_column_type(db_type)
  if db_type =~ /\Atinyint/io
    # MySQL commonly uses tinyint(1) for booleans; honor the global flag.
    Sequel.convert_tinyint_to_bool ? :boolean : :integer
  elsif db_type =~ /\Ainterval\z/io
    :interval
  elsif db_type =~ /\A(character( varying)?|varchar|text)/io
    :string
  elsif db_type =~ /\A(int(eger)?|bigint|smallint)/io
    :integer
  elsif db_type =~ /\Adate\z/io
    :date
  elsif db_type =~ /\A(datetime|timestamp( with(out)? time zone)?)\z/io
    :datetime
  elsif db_type =~ /\Atime( with(out)? time zone)?\z/io
    :time
  elsif db_type =~ /\Aboolean\z/io
    :boolean
  elsif db_type =~ /\A(real|float|double( precision)?)\z/io
    :float
  elsif db_type =~ /\A(numeric(\(\d+,\d+\))?|decimal|money)\z/io
    :decimal
  elsif db_type =~ /bytea|blob/io
    :blob
  end
end
309
+
310
# SQL fragment specifying the full type of a given column, including
# size/elements and an UNSIGNED marker.
# NOTE: mutates the column hash, defaulting :size to 255 for varchar.
def type_literal(column)
  type = type_literal_base(column)
  column[:size] ||= 255 if type.to_s == 'varchar'
  elements = column[:size] || column[:elements]
  # Array(elements) literalizes to a parenthesized list, e.g. "(255)".
  "#{type}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}"
end
317
+
318
# SQL fragment specifying the base database type for a given column,
# without the size or elements. Unknown types pass through unchanged
# via TYPES' identity default block.
def type_literal_base(column)
  TYPES[column[:type]]
end
end # class Database
end # module Sequel
@@ -0,0 +1,422 @@
1
module Sequel
  # A Dataset represents a view of the data in a database, constrained by
  # specific parameters such as filtering conditions, order, etc. Datasets
  # can be used to create, retrieve, update and delete records.
  #
  # Query results are always retrieved on demand, so a dataset can be kept
  # around and reused indefinitely:
  #
  #   my_posts = DB[:posts].filter(:author => 'david') # no records are retrieved
  #   p my_posts.all # records are now retrieved
  #   ...
  #   p my_posts.all # records are retrieved again
  #
  # In order to provide this functionality, dataset methods such as where,
  # select, order, etc. return modified copies of the dataset, so you can
  # use different datasets to access data:
  #
  #   posts = DB[:posts]
  #   davids_posts = posts.filter(:author => 'david')
  #   old_posts = posts.filter('stamp < ?', Date.today - 7)
  #
  # Datasets are Enumerable objects, so they can be manipulated using any
  # of the Enumerable methods, such as map, inject, etc.
  #
  # === Methods added via metaprogramming
  #
  # Some methods are added via metaprogramming:
  #
  # * ! methods - These methods are the same as their non-! counterparts,
  #   but they modify the receiver instead of returning a modified copy
  #   of the dataset.
  # * inner_join, full_outer_join, right_outer_join, left_outer_join -
  #   These methods are shortcuts to join_table with the join type
  #   already specified.
  class Dataset
    extend Metaprogramming
    include Metaprogramming
    include Enumerable

    # The dataset options that require the removal of cached columns
    # if changed.
    COLUMN_CHANGE_OPTS = [:select, :sql, :from, :join].freeze

    # All methods that should have a ! method added that modifies
    # the receiver.
    MUTATION_METHODS = %w'add_graph_aliases and distinct exclude exists
    filter from from_self full_outer_join graph
    group group_and_count group_by having inner_join intersect invert join
    left_outer_join limit naked or order order_by order_more paginate query reject
    reverse reverse_order right_outer_join select select_all select_more
    set_defaults set_graph_aliases set_overrides sort sort_by
    unfiltered union unordered where with_sql'.collect{|x| x.to_sym}

    # Error message raised by methods that adapters must override.
    NOTIMPL_MSG = "This method must be overridden in Sequel adapters".freeze

    # Built-in transforms usable via Dataset#transform; each value is a
    # [loader, dumper] pair of procs.
    STOCK_TRANSFORMS = {
      :marshal => [
        # for backwards-compatibility we support also non-base64-encoded values.
        proc {|v| Marshal.load(v.unpack('m')[0]) rescue Marshal.load(v)},
        proc {|v| [Marshal.dump(v)].pack('m')}
      ],
      :yaml => [
        proc {|v| YAML.load v if v},
        proc {|v| v.to_yaml}
      ]
    }

    # The database that corresponds to this dataset
    attr_accessor :db

    # Set the method to call on identifiers going into the database for this dataset
    attr_accessor :identifier_input_method

    # Set the method to call on identifiers coming from the database for this dataset
    attr_accessor :identifier_output_method

    # The hash of options for this dataset, keys are symbols.
    attr_accessor :opts

    # Whether to quote identifiers for this dataset
    attr_writer :quote_identifiers

    # The row_proc for this dataset, should be a Proc that takes
    # a single hash argument and returns the object you want
    # fetch_rows to return.
    attr_accessor :row_proc
86
+
87
# Constructs a new instance of a dataset with an associated database and
# options. Datasets are usually constructed by invoking Database methods:
#
#   DB[:posts]
#
# Or:
#
#   DB.dataset # the returned dataset is blank
#
# Sequel::Dataset is an abstract class that is not useful by itself. Each
# database adaptor should provide a descendant class of Sequel::Dataset.
def initialize(db, opts = nil)
  @db = db
  # Inherit quoting/identifier conversion behavior from the database,
  # when the database object supports it.
  @quote_identifiers = db.quote_identifiers? if db.respond_to?(:quote_identifiers?)
  @identifier_input_method = db.identifier_input_method if db.respond_to?(:identifier_input_method)
  @identifier_output_method = db.identifier_output_method if db.respond_to?(:identifier_output_method)
  @opts = opts || {}
  @row_proc = nil
  @transform = nil
end
107
+
108
### Class Methods ###

# Setup mutation (e.g. filter!) methods. These operate the same as the
# non-! methods, but replace the options of the current dataset with the
# options of the resulting dataset (via the private mutation_method).
def self.def_mutation_method(*meths)
  meths.each do |meth|
    class_eval("def #{meth}!(*args, &block); mutation_method(:#{meth}, *args, &block) end")
  end
end
118
+
119
### Instance Methods ###

# Alias for insert, but not aliased directly so subclasses
# don't have to override both methods.
def <<(*args)
  insert(*args)
end
126
+
127
# Return the dataset as a column with the given alias, so it can be used in the
# SELECT clause. This dataset should result in a single row and a single column.
def as(aliaz)
  ::Sequel::SQL::AliasedExpression.new(self, aliaz)
end
132
+
133
# Returns an array with all records in the dataset. If a block is given,
# the array is iterated over after all items have been loaded.
# Passing opts is deprecated; use dataset.clone(opts).all instead.
def all(opts = (defarg=true;nil), &block)
  # defarg is only true when no argument was passed at all.
  Deprecation.deprecate("Calling Dataset#all with an argument is deprecated and will raise an error in a future version. Use dataset.clone(opts).all.") unless defarg
  a = []
  defarg ? each{|r| a << r} : each(opts){|r| a << r}
  # NOTE(review): post_load is defined elsewhere in the file; presumably a
  # post-fetch hook for subclasses — confirm against model code.
  post_load(a)
  a.each(&block) if block
  a
end
143
+
144
# Returns a new clone of the dataset with the given options merged into
# the current options. If the options changed include options in
# COLUMN_CHANGE_OPTS, the cached columns are deleted.
def clone(opts = {})
  c = super()
  c.opts = @opts.merge(opts)
  # Invalidate the cached column list when the change could affect it.
  c.instance_variable_set(:@columns, nil) if opts.keys.any?{|o| COLUMN_CHANGE_OPTS.include?(o)}
  c
end
153
+
154
# Returns the columns in the result set in their true order.
# If the columns are currently cached, returns the cached value. Otherwise,
# a SELECT query is performed to get a single row. Adapters are expected
# to fill the columns cache with the column information when a query is performed.
# If the dataset does not have any rows, this will be an empty array.
# If you are looking for all columns for a single table, see Database#schema.
def columns
  return @columns if @columns
  # Strip filters/order/limit so the probe query is as cheap as possible.
  ds = unfiltered.unordered.clone(:distinct => nil, :limit => 1)
  # Fetch at most one row; the adapter populates @columns as a side effect.
  ds.each{break}
  @columns = ds.instance_variable_get(:@columns)
  @columns || []
end
167
+
168
# Remove the cached list of columns and do a SELECT query to find
# the columns.
def columns!
  @columns = nil
  columns
end
174
+
175
# Add a mutation method to this dataset instance only (singleton method,
# unlike the class-level def_mutation_method).
def def_mutation_method(*meths)
  meths.each do |meth|
    instance_eval("def #{meth}!(*args, &block); mutation_method(:#{meth}, *args, &block) end")
  end
end
181
+
182
# Deletes the records in the dataset. The returned value is generally the
# number of records deleted, but that is adapter dependent.
# Passing opts is deprecated; use dataset.clone(opts).delete instead.
def delete(opts=(defarg=true;nil))
  Deprecation.deprecate("Calling Dataset#delete with an argument is deprecated and will raise an error in a future version. Use dataset.clone(opts).delete.") unless defarg
  execute_dui(defarg ? delete_sql : delete_sql(opts))
end
188
+
189
# Iterates over the records in the dataset and returns the dataset. If opts
# have been passed that modify the columns, reset the column information.
# Passing opts is deprecated; use dataset.clone(opts).each instead.
def each(opts = (defarg=true;nil), &block)
  Deprecation.deprecate("Calling Dataset#each with an argument is deprecated and will raise an error in a future version. Use dataset.clone(opts).each.") unless defarg
  if opts && opts.keys.any?{|o| COLUMN_CHANGE_OPTS.include?(o)}
    # Options that change the selected columns would poison the cached
    # column list; restore the previous cache after iterating.
    prev_columns = @columns
    begin
      defarg ? _each(&block) : _each(opts, &block)
    ensure
      @columns = prev_columns
    end
  else
    defarg ? _each(&block) : _each(opts, &block)
  end
  self
end
205
+
206
# Executes a select query and fetches records, passing each record to the
# supplied block. The yielded records are generally hashes with symbol keys,
# but that is adapter dependent. Adapters must override this method.
def fetch_rows(sql, &block)
  raise NotImplementedError, NOTIMPL_MSG
end
212
+
213
# Inserts values into the associated table. The returned value is generally
# the value of the primary key for the inserted row, but that is adapter dependent.
def insert(*values)
  execute_insert(insert_sql(*values))
end
218
+
219
# Returns a string representation of the dataset including the class name
# and the corresponding SQL select statement.
def inspect
  "#<#{self.class}: #{sql.inspect}>"
end
224
+
225
# Returns a naked dataset clone - i.e. a dataset that returns records as
# plain hashes instead of passing them through the row proc.
def naked
  bare = clone
  bare.row_proc = nil
  bare
end
232
+
233
# Whether this dataset quotes identifiers (returns the raw setting).
def quote_identifiers?
  @quote_identifiers
end
237
+
238
# Set the server for this dataset to use. Used to pick a specific database
# shard to run a query against, or to override the default where SELECT uses
# the :read_only database and all other queries use the :default database.
def server(servr)
  clone(:server=>servr)
end
244
+
245
# Alias for update, but not aliased directly so subclasses
# don't have to override both methods.
def set(*args)
  update(*args)
end
250
+
251
# Set the default values for insert and update statements. The values
# passed to insert or update are merged into this hash, so explicit
# arguments win over the defaults.
def set_defaults(hash)
  merged = (@opts[:defaults] || {}).merge(hash)
  clone(:defaults => merged)
end
256
+
257
# Set values that override hash arguments given to insert and update
# statements. This hash is merged over the hash provided to insert or
# update, so the overrides win.
def set_overrides(hash)
  merged = hash.merge(@opts[:overrides] || {})
  clone(:overrides => merged)
end
262
+
263
# Sets a value transform which is used to convert values loaded and saved
# to/from the database. The transform should be supplied as a hash. Each
# value in the hash should be an array containing two proc objects - one
# for transforming loaded values, and one for transforming saved values.
# The following example demonstrates how to store Ruby objects in a dataset
# using Marshal serialization:
#
#   dataset.transform(:obj => [
#     proc {|v| Marshal.load(v)},
#     proc {|v| Marshal.dump(v)}
#   ])
#
#   dataset.insert_sql(:obj => 1234) #=>
#   "INSERT INTO items (obj) VALUES ('\004\bi\002\322\004')"
#
# Another form of using transform is by specifying stock transforms:
#
#   dataset.transform(:obj => :marshal)
#
# The currently supported stock transforms are :marshal and :yaml.
def transform(t)
  @transform = t
  t.each do |k, v|
    case v
    when Array
      # NOTE(review): && binds tighter than ||, so this only raises when
      # BOTH elements are non-Procs; a [Proc, non-Proc] pair slips
      # through validation. Probably intended
      # !(v.first.is_a?(Proc) && v.last.is_a?(Proc)) — confirm before changing.
      if (v.size != 2) || !v.first.is_a?(Proc) && !v.last.is_a?(Proc)
        raise Error::InvalidTransform, "Invalid transform specified"
      end
    else
      # Symbol shorthand: replace with the stock [loader, dumper] pair.
      unless v = STOCK_TRANSFORMS[v]
        raise Error::InvalidTransform, "Invalid transform specified"
      else
        t[k] = v
      end
    end
  end
  self
end
301
+
302
# Applies the value transform for data loaded from the database:
# each value whose key has a transform gets its loader proc applied;
# other values pass through unchanged. Returns a new hash.
def transform_load(r)
  loaded = {}
  r.each do |k, v|
    pair = @transform[k]
    loaded[k] = pair ? pair[0][v] : v
  end
  loaded
end
310
+
311
# Applies the value transform for data saved to the database:
# each value whose key has a transform gets its dumper proc applied;
# other values pass through unchanged. Returns a new hash.
def transform_save(r)
  saved = {}
  r.each do |k, v|
    pair = @transform[k]
    saved[k] = pair ? pair[1][v] : v
  end
  saved
end
319
+
320
# Updates values for the dataset. The returned value is generally the
# number of rows updated, but that is adapter dependent.
# Passing opts is deprecated; use dataset.clone(opts).update instead.
def update(values={}, opts=(defarg=true;nil))
  Deprecation.deprecate("Calling Dataset#update with an argument is deprecated and will raise an error in a future version. Use dataset.clone(opts).update.") unless defarg
  # Fix: the deprecated branch previously called update_sql(value, opts)
  # with the undefined local `value`, raising NameError whenever an opts
  # argument was passed.
  execute_dui(defarg ? update_sql(values) : update_sql(values, opts))
end
326
+
327
+ # Add the mutation methods via metaprogramming
328
+ def_mutation_method(*MUTATION_METHODS)
329
+
330
+ protected
331
+
332
# Return true if the dataset has a non-nil value for any key in opts.
#
# Used to decide whether existing dataset options would conflict with
# an operation. Only keys whose values are non-nil count as "set".
def options_overlap(opts)
  set_keys = @opts.reject { |_, value| value.nil? }.keys
  !(set_keys & opts).empty?
end
336
+
337
+ private
338
+
339
# Runs #graph_each if graphing. Otherwise, iterates through the records
# yielded by #fetch_rows, applying any row_proc or transform if necessary,
# and yielding the result.
#
# Passing an opts argument (the deprecated form, signalled by defarg
# being unset) forwards it to graph_each/select_sql; opts[:naked]
# suppresses the row_proc and opts[:graph] == false suppresses graphing.
def _each(opts=(defarg=true;nil), &block)
  if @opts[:graph] && !(opts && opts[:graph] == false)
    return defarg ? graph_each(&block) : graph_each(opts, &block)
  end
  row_proc = (opts && opts[:naked]) ? nil : @row_proc
  transform = @transform
  sql = defarg ? select_sql : select_sql(opts)
  fetch_rows(sql) do |row|
    row = transform_load(row) if transform
    row = row_proc[row] if row_proc
    yield row
  end
end
355
+
356
# Set the server to use to :default unless it is already set in the passed opts.
#
# The dataset's own :server option takes precedence over :default, and
# any :server key in +opts+ overrides both (merge semantics).
def default_server_opts(opts)
  server = @opts[:server] || :default
  {:server => server}.merge(opts)
end
360
+
361
# Execute the given SQL on the database using execute.
#
# Unlike execute_dui/execute_insert, read queries default to the
# :read_only server (not :default) when the dataset has no explicit
# server, so reads can be routed to replicas.
def execute(sql, opts={}, &block)
  server = @opts[:server] || :read_only
  @db.execute(sql, {:server => server}.merge(opts), &block)
end
365
+
366
# Execute the given SQL on the database using execute_dui
# (delete/update/insert), routed to the dataset's server or :default.
def execute_dui(sql, opts={}, &block)
  merged = default_server_opts(opts)
  @db.execute_dui(sql, merged, &block)
end
370
+
371
# Execute the given SQL on the database using execute_insert,
# routed to the dataset's server or :default.
def execute_insert(sql, opts={}, &block)
  merged = default_server_opts(opts)
  @db.execute_insert(sql, merged, &block)
end
375
+
376
# Modify an identifier being sent to the database based on the
# identifier_input_method. (The original comment was a copy-paste of
# #output_identifier's and wrongly referred to identifiers *returned*
# from the database.)
def input_identifier(v)
  meth = identifier_input_method
  meth ? v.to_s.send(meth) : v.to_s
end
381
+
382
# Modify the receiver with the results of sending the meth, args, and block
# to the receiver and merging the options of the resulting dataset into
# the receiver's options. Returns self, making the non-destructive method
# +meth+ behave destructively.
def mutation_method(meth, *args, &block)
  modified = send(meth, *args, &block)
  @opts.merge!(modified.opts)
  self
end
390
+
391
# Modify the identifier returned from the database based on the
# identifier_output_method, returning a symbol either way.
def output_identifier(v)
  meth = identifier_output_method
  meth ? v.to_s.send(meth).to_sym : v.to_sym
end
396
+
397
# This is run inside .all, after all of the records have been loaded
# via .each, but before any block passed to all is called. It is called with
# a single argument, an array of all returned records. Does nothing by
# default, added to make the model eager loading code simpler; subclasses
# may override this hook to post-process the loaded records.
def post_load(all_records)
end
403
+
404
# If a block argument is passed to a method that uses a VirtualRow,
# yield a new VirtualRow instance to the block if it accepts a single
# argument. Otherwise, evaluate the block in the context of a new
# VirtualRow instance.
#
# Returns nil when no block is given. When
# Sequel.virtual_row_instance_eval is off, blocks without exactly one
# argument trigger a deprecation warning and are still called with the
# VirtualRow as an argument (the legacy semantics).
def virtual_row_block_call(block)
  return unless block
  unless Sequel.virtual_row_instance_eval
    Deprecation.deprecate('Using a VirtualRow block without an argument is deprecated, and its meaning will change in a future version. Add a block argument to keep the old semantics, or set Sequel.virtual_row_instance_eval = true to use instance_eval for VirtualRow blocks without arguments.') unless block.arity == 1
    return block.call(SQL::VirtualRow.new)
  end
  if [-1, 0].include?(block.arity)
    # Argument-less block: evaluate with the VirtualRow as self.
    SQL::VirtualRow.new.instance_eval(&block)
  else
    block.call(SQL::VirtualRow.new)
  end
end
421
+ end
422
+ end