sequel_core 1.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (57) hide show
  1. data/CHANGELOG +1003 -0
  2. data/COPYING +18 -0
  3. data/README +81 -0
  4. data/Rakefile +176 -0
  5. data/bin/sequel +41 -0
  6. data/lib/sequel_core.rb +59 -0
  7. data/lib/sequel_core/adapters/adapter_skeleton.rb +68 -0
  8. data/lib/sequel_core/adapters/ado.rb +100 -0
  9. data/lib/sequel_core/adapters/db2.rb +158 -0
  10. data/lib/sequel_core/adapters/dbi.rb +126 -0
  11. data/lib/sequel_core/adapters/informix.rb +87 -0
  12. data/lib/sequel_core/adapters/jdbc.rb +108 -0
  13. data/lib/sequel_core/adapters/mysql.rb +269 -0
  14. data/lib/sequel_core/adapters/odbc.rb +145 -0
  15. data/lib/sequel_core/adapters/odbc_mssql.rb +93 -0
  16. data/lib/sequel_core/adapters/openbase.rb +90 -0
  17. data/lib/sequel_core/adapters/oracle.rb +99 -0
  18. data/lib/sequel_core/adapters/postgres.rb +519 -0
  19. data/lib/sequel_core/adapters/sqlite.rb +192 -0
  20. data/lib/sequel_core/array_keys.rb +296 -0
  21. data/lib/sequel_core/connection_pool.rb +152 -0
  22. data/lib/sequel_core/core_ext.rb +59 -0
  23. data/lib/sequel_core/core_sql.rb +191 -0
  24. data/lib/sequel_core/database.rb +433 -0
  25. data/lib/sequel_core/dataset.rb +409 -0
  26. data/lib/sequel_core/dataset/convenience.rb +321 -0
  27. data/lib/sequel_core/dataset/sequelizer.rb +354 -0
  28. data/lib/sequel_core/dataset/sql.rb +586 -0
  29. data/lib/sequel_core/exceptions.rb +45 -0
  30. data/lib/sequel_core/migration.rb +191 -0
  31. data/lib/sequel_core/model.rb +8 -0
  32. data/lib/sequel_core/pretty_table.rb +73 -0
  33. data/lib/sequel_core/schema.rb +8 -0
  34. data/lib/sequel_core/schema/schema_generator.rb +131 -0
  35. data/lib/sequel_core/schema/schema_sql.rb +131 -0
  36. data/lib/sequel_core/worker.rb +58 -0
  37. data/spec/adapters/informix_spec.rb +139 -0
  38. data/spec/adapters/mysql_spec.rb +330 -0
  39. data/spec/adapters/oracle_spec.rb +130 -0
  40. data/spec/adapters/postgres_spec.rb +189 -0
  41. data/spec/adapters/sqlite_spec.rb +345 -0
  42. data/spec/array_keys_spec.rb +679 -0
  43. data/spec/connection_pool_spec.rb +356 -0
  44. data/spec/core_ext_spec.rb +67 -0
  45. data/spec/core_sql_spec.rb +301 -0
  46. data/spec/database_spec.rb +812 -0
  47. data/spec/dataset_spec.rb +2381 -0
  48. data/spec/migration_spec.rb +261 -0
  49. data/spec/pretty_table_spec.rb +66 -0
  50. data/spec/rcov.opts +4 -0
  51. data/spec/schema_generator_spec.rb +86 -0
  52. data/spec/schema_spec.rb +230 -0
  53. data/spec/sequelizer_spec.rb +448 -0
  54. data/spec/spec.opts +5 -0
  55. data/spec/spec_helper.rb +44 -0
  56. data/spec/worker_spec.rb +96 -0
  57. metadata +162 -0
@@ -0,0 +1,409 @@
1
+ require 'time'
2
+ require 'date'
3
+ require 'yaml'
4
+
5
+ require File.join(File.dirname(__FILE__), 'dataset/sql')
6
+ require File.join(File.dirname(__FILE__), 'dataset/sequelizer')
7
+ require File.join(File.dirname(__FILE__), 'dataset/convenience')
8
+
9
+ module Sequel
10
+ # A Dataset represents a view of the data in a database, constrained by
11
+ # specific parameters such as filtering conditions, order, etc. Datasets
12
+ # can be used to create, retrieve, update and delete records.
13
+ #
14
+ # Query results are always retrieved on demand, so a dataset can be kept
15
+ # around and reused indefinitely:
16
+ # my_posts = DB[:posts].filter(:author => 'david') # no records are retrieved
17
+ # p my_posts.all # records are now retrieved
18
+ # ...
19
+ # p my_posts.all # records are retrieved again
20
+ #
21
+ # In order to provide this functionality, dataset methods such as where,
22
+ # select, order, etc. return modified copies of the dataset, so you can
23
+ # use different datasets to access data:
24
+ # posts = DB[:posts]
25
+ # davids_posts = posts.filter(:author => 'david')
26
+ # old_posts = posts.filter('stamp < ?', 1.week.ago)
27
+ #
28
+ # Datasets are Enumerable objects, so they can be manipulated using any
29
+ # of the Enumerable methods, such as map, inject, etc.
30
+ #
31
+ # === The Dataset Adapter Interface
32
+ #
33
+ # Each adapter should define its own dataset class as a descendant of
34
+ # Sequel::Dataset. The following methods should be overridden by the adapter
35
+ # Dataset class (each method with the stock implementation):
36
+ #
37
+ # # Iterate over the results of the SQL query and call the supplied
38
+ # # block with each record (as a hash).
39
+ # def fetch_rows(sql, &block)
40
+ # @db.synchronize do
41
+ # r = @db.execute(sql)
42
+ # r.each(&block)
43
+ # end
44
+ # end
45
+ #
46
+ # # Insert records.
47
+ # def insert(*values)
48
+ # @db.synchronize do
49
+ # @db.execute(insert_sql(*values)).last_insert_id
50
+ # end
51
+ # end
52
+ #
53
+ # # Update records.
54
+ # def update(*args, &block)
55
+ # @db.synchronize do
56
+ # @db.execute(update_sql(*args, &block)).affected_rows
57
+ # end
58
+ # end
59
+ #
60
+ # # Delete records.
61
+ # def delete(opts = nil)
62
+ # @db.synchronize do
63
+ # @db.execute(delete_sql(opts)).affected_rows
64
+ # end
65
+ # end
66
+ class Dataset
67
+ include Enumerable
68
+ include Sequelizer
69
+ include SQL
70
+ include Convenience
71
+
72
+ attr_reader :db
73
+ attr_accessor :opts
74
+
75
+ alias all to_a
76
+ alias size count
77
+
78
# Constructs a dataset over the given database with the given initial
# options. Datasets are usually obtained through Database methods:
#   DB[:posts]
# Or:
#   DB.dataset # a blank dataset
#
# Sequel::Dataset is abstract and not useful by itself; each database
# adapter provides a concrete subclass.
def initialize(db, opts = nil)
  @db = db
  @opts = opts || {}    # query options hash; empty when none supplied
  @row_proc = nil       # optional per-row filter (see #set_row_proc)
  @transform = nil      # optional column value transform (see #transform)
end
93
+
94
# Returns a copy of this dataset with the given options merged into the
# current options.
def clone_merge(opts)
  copy = clone
  copy.set_options(@opts.merge(opts))
  copy
end

# Replaces the dataset's options and invalidates the cached column list,
# since new options may change the result set's shape.
def set_options(opts) #:nodoc:
  @opts = opts
  @columns = nil
end
105
+
106
# Message raised by the abstract CRUD methods below.
NOTIMPL_MSG = "This method must be overriden in Sequel adapters".freeze

# Executes a select query and yields each fetched record (as a hash) to
# the supplied block. Abstract: adapters must override, typically as
#   @db.synchronize {@db.execute(sql).each(&block)}
def fetch_rows(sql, &block)
  raise NotImplementedError, NOTIMPL_MSG
end

# Inserts values into the associated table. Abstract: adapters must
# override, typically returning the new record's id.
def insert(*values)
  raise NotImplementedError, NOTIMPL_MSG
end

# Updates values for the dataset. Abstract: adapters must override,
# typically returning the number of affected rows.
def update(values, opts = nil)
  raise NotImplementedError, NOTIMPL_MSG
end

# Deletes the records in the dataset. Abstract: adapters must override,
# typically returning the number of affected rows.
def delete(opts = nil)
  raise NotImplementedError, NOTIMPL_MSG
end
142
+
143
# Returns the columns of the result set in their true order. If @columns
# has not yet been filled, a query is issued (via #first); adapters are
# expected to populate @columns when they fetch rows.
def columns
  first unless @columns
  @columns || []
end
151
+
152
# Inserts the supplied values into the associated table (shorthand for
# #insert).
def <<(*args)
  insert(*args)
end

# Updates the dataset with the given values (shorthand for #update).
def set(*args, &block)
  update(*args, &block)
end

# Iterates over the dataset's records, yielding each to the block.
# Returns self.
def each(opts = nil, &block)
  fetch_rows(select_sql(opts), &block)
  self
end

# The model class hash associated with the dataset, or nil if naked.
def model_classes
  @opts[:models]
end

# The column name used as the polymorphic key, or nil.
def polymorphic_key
  @opts[:polymorphic_key]
end
177
+
178
# Returns a naked clone of the dataset — one that yields plain hashes
# instead of model instances.
def naked
  d = clone_merge(:naked => true, :models => nil, :polymorphic_key => nil)
  d.set_model(nil)
  d
end
185
+
186
# Associates or disassociates the dataset with a model class (or classes).
#
#   dataset.set_model(nil)                       # naked: rows are hashes
#   dataset.set_model(MyModel)                   # rows wrapped in MyModel
#   dataset.set_model(MyModel, :opt => true)     # extra args for MyModel.new
#   dataset.set_model(:kind, {1 => A, 2 => B})   # polymorphic on :kind
#
# In the polymorphic form the first argument is the key column and the
# second a hash mapping column values to classes; a nil key in that hash
# acts as the fallback class:
#
#   dataset.set_model(:kind, {nil => Default, 1 => Person, 2 => Business})
#
# Returns self.
def set_model(key, *args)
  case key
  when nil
    # Disassociate any model: the dataset is denuded.
    @opts.merge!(:naked => true, :models => nil, :polymorphic_key => nil)
    remove_row_proc
  when Class
    # Single (isomorphic) model: every row is wrapped in the given class.
    @opts.merge!(:naked => nil, :models => {nil => key}, :polymorphic_key => nil)
    set_row_proc {|h| key.new(h, *args)}
    extend_with_destroy
  when Symbol
    # Polymorphic model: the class is chosen per-row by the key column.
    hash = args.shift || raise(ArgumentError, "No class hash supplied for polymorphic model")
    @opts.merge!(:naked => true, :models => hash, :polymorphic_key => key)
    set_row_proc do |h|
      c = hash[h[key]] || hash[nil] || \
        raise(Error, "No matching model class for record (#{polymorphic_key} => #{h[polymorphic_key].inspect})")
      c.new(h, *args)
    end
    extend_with_destroy
  else
    raise ArgumentError, "Invalid model specified"
  end
  self
end
255
+
256
# Installs a filter through which every fetched row is passed before
# being yielded; the filter's return value is what iteration sees:
#
#   dataset.set_row_proc {|h| h.merge(:xxx => 'yyy')}
#   dataset.first[:xxx] #=> "yyy" # always!
#
def set_row_proc(&filter)
  @row_proc = filter
  update_each_method
end

# Removes the row filter proc.
def remove_row_proc
  @row_proc = nil
  update_each_method
end
274
+
275
# Ready-made transforms selectable by name, e.g. transform(:obj => :marshal).
STOCK_TRANSFORMS = {
  :marshal => [proc {|v| Marshal.load(v)}, proc {|v| Marshal.dump(v)}],
  :yaml => [proc {|v| YAML.load v if v}, proc {|v| v.to_yaml}]
}

# Sets a value transform used to convert values loaded from and saved to
# the database. The transform is a hash; each value is either a stock
# transform name (:marshal or :yaml) or a two-element array of procs —
# [load_proc, save_proc]:
#
#   dataset.transform(:obj => [
#     proc {|v| Marshal.load(v)},
#     proc {|v| Marshal.dump(v)}
#   ])
#
#   dataset.transform(:obj => :marshal)
#
# Stock names are resolved in place into their proc pairs. Raises
# Error::InvalidTransform for malformed entries. Returns self.
def transform(t)
  @transform = t
  t.each do |k, v|
    case v
    when Array
      # A custom transform must be exactly two procs. (Fixed: the original
      # check combined `|| ... && ...`, whose precedence let a pair with
      # only one Proc slip through validation.)
      if (v.size != 2) || !v.first.is_a?(Proc) || !v.last.is_a?(Proc)
        raise Error::InvalidTransform, "Invalid transform specified"
      end
    else
      unless v = STOCK_TRANSFORMS[v]
        raise Error::InvalidTransform, "Invalid transform specified"
      else
        t[k] = v
      end
    end
  end
  update_each_method
  self
end
319
+
320
# Applies the load half of the value transform to a record fetched from
# the database; only keys present in the record are touched. Returns r.
def transform_load(r)
  @transform.each do |k, pair|
    r[k] = pair[0][r[k]] if r.has_key?(k)
  end
  r
end

# Applies the save half of the value transform to a record headed for the
# database; only keys present in the record are touched. Returns r.
def transform_save(r)
  @transform.each do |k, pair|
    r[k] = pair[1][r[k]] if r.has_key?(k)
  end
  r
end
339
+
340
# Redefines this dataset's singleton #each to match the current
# @row_proc/@transform combination, so no per-row conditionals are paid
# at fetch time. (Deliberately ugly singleton-class code generation.)
def update_each_method
  if @row_proc && @transform
    class << self
      def each(opts = nil, &block)
        if opts && opts[:naked]
          fetch_rows(select_sql(opts)) {|r| block[transform_load(r)]}
        else
          fetch_rows(select_sql(opts)) {|r| block[@row_proc[transform_load(r)]]}
        end
        self
      end
    end
  elsif @row_proc
    class << self
      def each(opts = nil, &block)
        if opts && opts[:naked]
          fetch_rows(select_sql(opts), &block)
        else
          fetch_rows(select_sql(opts)) {|r| block[@row_proc[r]]}
        end
        self
      end
    end
  elsif @transform
    class << self
      def each(opts = nil, &block)
        fetch_rows(select_sql(opts)) {|r| block[transform_load(r)]}
        self
      end
    end
  else
    class << self
      def each(opts = nil, &block)
        fetch_rows(select_sql(opts), &block)
        self
      end
    end
  end
end
382
+
383
# Adds a #destroy singleton method (unless one already exists) that calls
# destroy on every record in the dataset inside a single transaction and
# returns the number of records destroyed.
def extend_with_destroy
  unless respond_to?(:destroy)
    meta_def(:destroy) do
      unless @opts[:models]
        raise Error, "No model associated with this dataset"
      end
      count = 0
      @db.transaction {each {|r| count += 1; r.destroy}}
      count
    end
  end
end
397
+
398
# Registry of every Dataset subclass defined by adapters.
@@dataset_classes = []

# Returns all registered adapter Dataset subclasses.
def self.dataset_classes #:nodoc:
  @@dataset_classes
end

# Records each subclass as it is defined so adapters can be enumerated.
def self.inherited(c) #:nodoc:
  @@dataset_classes << c
end
407
+ end
408
+ end
409
+
@@ -0,0 +1,321 @@
1
+ require 'enumerator'
2
+
3
+ module Sequel
4
+ class Dataset
5
+ module Convenience
6
# Iterates over the records, yielding each one converted to a hash.
def each_hash(&block)
  each {|a| block[a.to_hash]}
end

# Returns true if the dataset contains no records.
def empty?
  count == 0
end

# Fetches and returns the first record, or nil if there are none.
def single_record(opts = nil)
  each(opts) {|r| return r}
  nil
end

# Options forcing plain-hash rows for value queries.
NAKED_HASH = {:naked => true}.freeze

# Returns the first value of the first record in the dataset, or nil if
# the dataset is empty. Rows are fetched naked (as plain hashes).
def single_value(opts = nil)
  opts = opts ? NAKED_HASH.merge(opts) : NAKED_HASH
  # Reset the columns cache so this value query won't taint subsequent
  # calls to #columns.
  each(opts) {|r| @columns = nil; return r.values.first}
  nil
end
32
+
33
# Returns the first record in the dataset. With an integer argument num,
# returns an array of the first num records. With filter conditions (or a
# block), returns the first matching record.
def first(*args, &block)
  return filter(&block).single_record(:limit => 1) if block
  args = args.empty? ? 1 : (args.size == 1) ? args.first : args
  case args
  when 1
    single_record(:limit => 1)
  when Fixnum
    limit(args).all
  else
    filter(args, &block).single_record(:limit => 1)
  end
end
49
+
50
# Returns the first record matching the given conditions.
def [](*conditions)
  first(*conditions)
end

# Updates all records matching the given conditions with the given values.
def []=(conditions, values)
  filter(conditions).update(values)
end
58
+
59
# Returns the last record(s) by inverting the dataset's order. Raises
# Error if no order is specified. With no arguments the last record is
# returned; with an integer num, an array of the last num records; any
# other argument is treated as filter conditions.
def last(*args)
  raise Error, 'No order specified' unless
    @opts[:order] || (opts && opts[:order])

  args = args.empty? ? 1 : (args.size == 1) ? args.first : args

  case args
  when Fixnum
    l = {:limit => args}
    # NOTE(review): as soon as this assignment begins, `opts` is a fresh
    # nil local shadowing the #opts reader, so `opts ? opts.merge(l) : l`
    # always evaluates to `l` — confirm this is the intended behavior.
    opts = {:order => invert_order(@opts[:order])}. \
      merge(opts ? opts.merge(l) : l)
    if args == 1
      single_record(opts)
    else
      clone_merge(opts).all
    end
  else
    filter(args).last(1)
  end
end
83
+
84
# With a column name, returns an array of that column's value for every
# record; otherwise performs the stock Enumerable#map.
# NOTE(review): when column_name is given, a simultaneously passed block
# is silently ignored — confirm callers never rely on both.
def map(column_name = nil, &block)
  if column_name
    super() {|r| r[column_name]}
  else
    super(&block)
  end
end
93
+
94
# Builds a hash mapping each record's key_column value to its
# value_column value.
def to_hash(key_column, value_column)
  inject({}) do |acc, record|
    acc[record[key_column]] = record[value_column]
    acc
  end
end
101
+
102
# Returns a paginated dataset limited to the given page. The result also
# carries pagination state (Dataset#page_count, #current_page, #prev_page,
# #next_page) for building pagination controls.
def paginate(page_no, page_size)
  record_count = count
  paginated = limit(page_size, (page_no - 1) * page_size)
  # The page count is derived inside set_pagination_info; the original
  # also computed an unused `total_pages` local here, now removed.
  paginated.set_pagination_info(page_no, page_size, record_count)
  paginated
end
113
+
114
# Stores the pagination state computed by #paginate on this dataset,
# deriving the total page count from the record count and page size.
def set_pagination_info(page_no, page_size, record_count)
  @current_page = page_no
  @page_size = page_size
  @pagination_record_count = record_count
  @page_count = (record_count / page_size.to_f).ceil
end
121
+
122
+ attr_accessor :page_size, :page_count, :current_page, :pagination_record_count
123
+
124
# The previous page number, or nil when on the first page.
def prev_page
  current_page > 1 ? (current_page - 1) : nil
end

# The next page number, or nil when on the last page.
def next_page
  current_page < page_count ? (current_page + 1) : nil
end

# The full range of page numbers.
def page_range
  1..page_count
end
138
+
139
# The 1-based range of record positions on the current page, or (0..0)
# when the current page lies beyond the last page.
def current_page_record_range
  return (0..0) if @current_page > @page_count

  first_pos = 1 + (@current_page - 1) * @page_size
  last_pos = first_pos + @page_size - 1
  # The final page may be partial; clamp to the total record count.
  last_pos = @pagination_record_count if last_pos > @pagination_record_count
  first_pos..last_pos
end
148
+
149
# The number of records on the current page, or 0 when the current page
# lies beyond the last page.
def current_page_record_count
  return 0 if @current_page > @page_count

  first_pos = 1 + (@current_page - 1) * @page_size
  last_pos = first_pos + @page_size - 1
  # The final page may be partial; clamp to the total record count.
  last_pos = @pagination_record_count if last_pos > @pagination_record_count
  last_pos - first_pos + 1
end
158
+
159
# Returns the minimum value of the given column.
def min(column)
  single_value(:select => [column.MIN.AS(:v)])
end

# Returns the maximum value of the given column.
def max(column)
  single_value(:select => [column.MAX.AS(:v)])
end

# Returns the sum of the given column over all records.
def sum(column)
  single_value(:select => [column.SUM.AS(:v)])
end

# Returns the average value of the given column.
def avg(column)
  single_value(:select => [column.AVG.AS(:v)])
end
178
+
179
# Returns a dataset grouped by the given column, selecting the column and
# the per-group record count, ordered by that count.
def group_and_count(column)
  group(column).select(column, :count[column].AS(:count)).order(:count)
end
183
+
184
# Returns a Range spanning the minimum and maximum values of the given
# column, or nil if no record was returned.
def range(column)
  r = select(column.MIN.AS(:v1), column.MAX.AS(:v2)).first
  r && (r[:v1]..r[:v2])
end
190
+
191
# Returns the difference between the maximum and minimum values of the
# given column (computed in SQL), or nil if no record was returned.
def interval(column)
  r = select("(max(#{literal(column)}) - min(#{literal(column)})) AS v".lit).first
  r && r[:v]
end
197
+
198
# Pretty-prints the dataset's records as a plain-text table, using the
# given columns or, when none are given, the dataset's own columns.
def print(*cols)
  Sequel::PrettyTable.print(naked.all, cols.empty? ? columns : cols)
end
202
+
203
COMMA_SEPARATOR = ', '.freeze

# Returns the dataset's records in CSV format. The first line holds the
# column titles unless include_column_titles is false.
# NOTE(review): the header relies on @columns already being populated on
# this dataset; the naked clone's fetch fills its own cache — confirm
# @columns is set here before the header is built.
def to_csv(include_column_titles = true)
  records = naked.to_a
  csv = ''
  csv << "#{@columns.join(COMMA_SEPARATOR)}\r\n" if include_column_titles
  records.each {|r| csv << "#{r.join(COMMA_SEPARATOR)}\r\n"}
  csv
end
218
+
219
# Inserts multiple records into the associated table efficiently. Inserts
# are wrapped in a transaction; when :commit_every is given, a separate
# transaction is used for each batch of that size:
#
#   dataset.multi_insert(list, :commit_every => 1000)
#
def multi_insert(list, opts = {})
  # Guard added: an empty list previously still opened a transaction.
  return if list.empty?
  # One unified path: without :commit_every the whole list is one batch.
  slice_size = opts[:commit_every] || list.size
  list.each_slice(slice_size) do |batch|
    @db.transaction do
      batch.each {|r| @db.execute(insert_sql(r))}
    end
  end
end
241
+
242
# Mixed into the throwaway copy used by #query: execution methods are
# forbidden inside a query block, and clone_merge accumulates options in
# place instead of producing a new copy.
module QueryBlockCopy #:nodoc:
  def each(*args); raise Error, "#each cannot be invoked inside a query block."; end
  def insert(*args); raise Error, "#insert cannot be invoked inside a query block."; end
  def update(*args); raise Error, "#update cannot be invoked inside a query block."; end
  def delete(*args); raise Error, "#delete cannot be invoked inside a query block."; end

  # Merge the given options into this copy's own options.
  def clone_merge(opts)
    @opts.merge!(opts)
  end
end
252
+
253
# Evaluates a query block against a throwaway copy of the dataset and
# returns a new dataset with the accumulated options. Useful for complex
# SELECT statements, e.g.:
#
#   dataset = DB[:items].query do
#     select :x, :y, :z
#     where {:x > 1 && :y > 2}
#     order_by :z.DESC
#   end
#
def query(&block)
  copy = clone_merge({})
  copy.extend(QueryBlockCopy)
  copy.instance_eval(&block)
  clone_merge(copy.opts)
end
268
+
269
+ MUTATION_RE = /^(.+)!$/.freeze
270
+
271
+ # Provides support for mutation methods (filter!, order!, etc.) and magic
272
+ # methods.
273
+ def method_missing(m, *args, &block)
274
+ if m.to_s =~ MUTATION_RE
275
+ m = $1.to_sym
276
+ super unless respond_to?(m)
277
+ copy = send(m, *args, &block)
278
+ super if copy.class != self.class
279
+ @opts.merge!(copy.opts)
280
+ self
281
+ elsif magic_method_missing(m)
282
+ send(m, *args)
283
+ else
284
+ super
285
+ end
286
+ end
287
+
288
# Magic method templates: each pattern maps a method-name regexp to a
# proc that, given the captured column name, builds the implementation.
MAGIC_METHODS = {
  /^order_by_(.+)$/ => proc {|c| proc {order(c)}},
  /^first_by_(.+)$/ => proc {|c| proc {order(c).first}},
  /^last_by_(.+)$/ => proc {|c| proc {order(c).last}},
  /^filter_by_(.+)$/ => proc {|c| proc {|v| filter(c => v)}},
  /^all_by_(.+)$/ => proc {|c| proc {|v| filter(c => v).all}},
  /^find_by_(.+)$/ => proc {|c| proc {|v| filter(c => v).first}},
  /^group_by_(.+)$/ => proc {|c| proc {group(c)}},
  /^count_by_(.+)$/ => proc {|c| proc {group_and_count(c)}}
}

# If m matches a magic method pattern, defines the method on Dataset and
# returns a truthy value; otherwise returns nil.
def magic_method_missing(m)
  method_name = m.to_s
  MAGIC_METHODS.each_pair do |pattern, builder|
    if method_name =~ pattern
      impl = builder[$1.to_sym]
      return Dataset.class_def(m, &impl)
    end
  end
  nil
end
311
+
312
# Creates a database view with the given name from this dataset's query.
def create_view(name)
  @db.create_view(name, self)
end

# Creates or replaces a database view with the given name from this
# dataset's query.
def create_or_replace_view(name)
  @db.create_or_replace_view(name, self)
end
319
+ end
320
+ end
321
+ end