sequel_core 1.4.0 → 1.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. data/CHANGELOG +74 -0
  2. data/COPYING +1 -0
  3. data/README +17 -6
  4. data/Rakefile +16 -21
  5. data/lib/sequel_core.rb +18 -28
  6. data/lib/sequel_core/adapters/ado.rb +3 -15
  7. data/lib/sequel_core/adapters/dbi.rb +1 -14
  8. data/lib/sequel_core/adapters/informix.rb +3 -3
  9. data/lib/sequel_core/adapters/jdbc.rb +2 -2
  10. data/lib/sequel_core/adapters/mysql.rb +39 -59
  11. data/lib/sequel_core/adapters/odbc.rb +18 -38
  12. data/lib/sequel_core/adapters/openbase.rb +1 -17
  13. data/lib/sequel_core/adapters/oracle.rb +1 -19
  14. data/lib/sequel_core/adapters/postgres.rb +20 -60
  15. data/lib/sequel_core/adapters/sqlite.rb +4 -8
  16. data/lib/sequel_core/connection_pool.rb +150 -0
  17. data/lib/sequel_core/core_ext.rb +41 -0
  18. data/lib/sequel_core/core_sql.rb +35 -38
  19. data/lib/sequel_core/database.rb +20 -17
  20. data/lib/sequel_core/dataset.rb +49 -80
  21. data/lib/sequel_core/dataset/callback.rb +11 -13
  22. data/lib/sequel_core/dataset/convenience.rb +18 -136
  23. data/lib/sequel_core/dataset/pagination.rb +81 -0
  24. data/lib/sequel_core/dataset/sequelizer.rb +5 -4
  25. data/lib/sequel_core/dataset/sql.rb +43 -33
  26. data/lib/sequel_core/deprecated.rb +200 -0
  27. data/lib/sequel_core/exceptions.rb +0 -14
  28. data/lib/sequel_core/object_graph.rb +199 -0
  29. data/lib/sequel_core/pretty_table.rb +27 -24
  30. data/lib/sequel_core/schema/generator.rb +16 -4
  31. data/lib/sequel_core/schema/sql.rb +5 -3
  32. data/lib/sequel_core/worker.rb +1 -1
  33. data/spec/adapters/informix_spec.rb +1 -47
  34. data/spec/adapters/mysql_spec.rb +85 -54
  35. data/spec/adapters/oracle_spec.rb +1 -57
  36. data/spec/adapters/postgres_spec.rb +66 -49
  37. data/spec/adapters/sqlite_spec.rb +4 -29
  38. data/spec/connection_pool_spec.rb +358 -0
  39. data/spec/core_sql_spec.rb +24 -19
  40. data/spec/database_spec.rb +13 -9
  41. data/spec/dataset_spec.rb +59 -78
  42. data/spec/object_graph_spec.rb +202 -0
  43. data/spec/pretty_table_spec.rb +1 -9
  44. data/spec/schema_generator_spec.rb +7 -1
  45. data/spec/schema_spec.rb +27 -0
  46. data/spec/sequelizer_spec.rb +2 -2
  47. data/spec/spec_helper.rb +4 -2
  48. metadata +16 -57
  49. data/lib/sequel_core/array_keys.rb +0 -322
  50. data/lib/sequel_core/model.rb +0 -8
  51. data/spec/array_keys_spec.rb +0 -682
data/CHANGELOG CHANGED
@@ -1,3 +1,77 @@
+ === HEAD
+
+ * Set a timeout in the Sqlite adapter, default to 5 seconds (hrvoje.marjanovic) (#218)
+
+ * Document that calling Sequel::ODBC::Database#execute manually requires you to manually drop the returned object (jeremyevans) (#217)
+
+ * Paginating an already paginated/limited dataset now raises an error (jeremyevans)
+
+ * Add support for PostgreSQL partial indexes (dlee)
+
+ * Added support for arbitrary index types (including spatial indexes) (dlee)
+
+ * Quote column names in SQL generated for SQLite (tmm1)
+
+ * Deprecate Object#rollback! (jeremyevans)
+
+ * Make some Dataset methods private (qualified_column_name, column_list, table_ref, source_list) (jeremyevans)
+
+ * Deprecate Dataset methods #set_options, #set_row_proc, #remove_row_proc, and #clone_merge (jeremyevans)
+
+ * Add Symbol#*, a replacement for Symbol#all (jeremyevans)
+
+ * Deprecate including ColumnMethods in Object, include it in Symbol, String, and Sequel::SQL::Expression (jeremyevans)
+
+ * Deprecate Symbol#method_missing, and #AS, #DESC, #ASC, #ALL, and #all from ColumnMethods (jeremyevans)
+
+ * Fix table joining in MySQL (jeremyevans)
+
+ * Deprecate Sequel.method_missing and Object#Sequel, add real Sequel.adapter methods (jeremyevans)
+
+ * Move dataset methods applicable only to paginated datasets into Sequel::Dataset::Pagination (jeremyevans)
+
+ * Make Sequel::Dataset::Sequelizer methods private (jeremyevans)
+
+ * Deprecate Dataset#method_missing, add real mutation methods (e.g. filter!) (jeremyevans)
+
+ * Fix connecting to an MSSQL server via ODBC using domain user credentials (jeremyevans) (#216)
+
+ * No longer depend on the assistance gem, merge in the ConnectionPool and .blank methods (jeremyevans)
+
+ * No longer depend on ParseTree, RubyInline, or ruby2ruby, but you still need them if you want to use the block filters (jeremyevans)
+
+ * Fix JDBC adapter by issuing index things start at 1 (pdamer)
+
+ * Fix connecting to a database via the ADO adapter (now requires options instead of URI) (timuckun, jeremyevans) (#204)
+
+ * Support storing microseconds in postgres timestamp fields (schnarch...@rootimage.msu.edu) (#215)
+
+ * Allow joining of multiple datasets, by making the table alias different for each dataset joined (jeremyevans)
+
+ * SECURITY: Fix backslash escaping of strings (dlee)
+
+ * Add ability to create a graph of objects from a query, with the result split into corresponding tables (jeremyevans) (#113)
+
+ * Add attr_accessor for dataset row_proc (jeremyevans)
+
+ * Don't redefine Dataset#each when adding a transform or row_proc (jeremyevans)
+
+ * Remove array_keys.rb from sequel_core, it was partially broken (since the arrays came from hashes), and redefined Dataset#each (jeremyevans)
+
+ * Fix MySQL default values insert (matt.binary) (#196)
+
+ * Fix ODBC adapter improperly escaping date and timestamp values (leo.borisenko) (#165)
+
+ * Fix renaming columns on MySQL with type :varchar (jeremyevans) (#206)
+
+ * Add Sequel::SQL::Function#==, for comparing SQL Functions (jeremyevans) (#209)
+
+ * Update Informix adapter to work with Ruby/Informix 0.7.0 (gerardo.santana@gmail.com)
+
+ * Remove sequel_core's knowledge of Sequel::Model (jeremyevans)
+
+ * Use "\n" instead of $/ (since $/ can be redefined in ways we do not want) (jeremyevans)
+
  === 1.4.0 (2008-04-08)

  * Merge 3 mysql patches from the bugtracker (mvyver) (#200, #201, #202).
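To make a few of the entries above concrete, here is a minimal sketch of the replacement APIs (the posts table and values are hypothetical, and the generated SQL is not shown by this diff):

  posts = DB[:posts]
  posts.filter!(:category => 'ruby')   # new mutation method; modifies posts in place
  posts.select(:posts.*)               # Symbol#*, the replacement for the deprecated :posts.all

  page = DB[:posts].paginate(1, 20)    # pagination-only methods now live in Sequel::Dataset::Pagination
  page.paginate(2, 20)                 # raises an error: the dataset is already paginated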
data/COPYING CHANGED
@@ -1,4 +1,5 @@
  Copyright (c) 2007-2008 Sharon Rosner
+ Copyright (c) 2008 Jeremy Evans

  Permission is hereby granted, free of charge, to any person obtaining a copy
  of this software and associated documentation files (the "Software"), to
data/README CHANGED
@@ -158,19 +158,19 @@ The simplest way to filter records is to provide a hash of values to match:

  You can also specify ranges:

- my_posts = posts.filter(:stamp => (2.weeks.ago)..(1.week.ago))
+ my_posts = posts.filter(:stamp => (Date.today - 14)..(Date.today - 7))

  Or lists of values:

  my_posts = posts.filter(:category => ['ruby', 'postgres', 'linux'])

- Sequel now also accepts expressions as closures, AKA block filters:
+ If ParseTree is installed, Sequel also accepts expressions as closures, AKA block filters:

  my_posts = posts.filter {:category == ['ruby', 'postgres', 'linux']}

  Which also lets you do stuff like:

- my_posts = posts.filter {:stamp > 1.month.ago}
+ my_posts = posts.filter {:stamp > Date.today << 1}

  Some adapters (like postgresql) will also let you specify Regexps:

@@ -186,7 +186,7 @@ You can then retrieve the records by using any of the retrieval methods:

  You can also specify a custom WHERE clause:

- posts.filter('(stamp < ?) AND (author <> ?)', 3.days.ago, author_name)
+ posts.filter('(stamp < ?) AND (author <> ?)', Date.today - 3, author_name)

  Datasets can also be used as subqueries:

@@ -213,7 +213,7 @@ You can also specify descending order

  === Deleting Records

- posts.filter('stamp < ?', 3.days.ago).delete
+ posts.filter('stamp < ?', Date.today - 3).delete

  === Inserting Records

@@ -225,7 +225,7 @@ Or alternatively:

  === Updating Records

- posts.filter('stamp < ?', 3.days.ago).update(:state => 'archived')
+ posts.filter('stamp < ?', Date.today - 7).update(:state => 'archived')

  === Joining Tables

@@ -250,3 +250,14 @@ Which is equivalent to the SQL:
  ON order_items.item_id = items.id
  WHERE order_items.order_id = 1234

+ === Graphing Datasets
+
+ When retrieving records from joined datasets, you get the results in a single hash, which is subject to clobbering:
+
+ DB[:items].join(:order_items, :item_id => :id).first
+ => {:id=>(could be items.id or order_items.id), :item_id=>order_items.order_id}
+
+ Using graph, you can split the resulting dataset into separate hashes:
+
+ DB[:items].graph(:order_items, :item_id => :id).first
+ => {:items=>{:id=>items.id}, :order_items=>{:id=>order_items.id, :item_id=>order_items.item_id}}
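Each graphed row is keyed by table, so joined columns no longer clobber one another. A rough usage sketch following the README example above:

  DB[:items].graph(:order_items, :item_id => :id).each do |row|
    # row[:items] and row[:order_items] are separate hashes
    puts "item #{row[:items][:id]} appears on order item #{row[:order_items][:id]}"
  end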
data/Rakefile CHANGED
@@ -9,16 +9,9 @@ include FileUtils
  # Configuration
  ##############################################################################
  NAME = "sequel_core"
- VERS = "1.4.0"
+ VERS = "1.5.0"
  CLEAN.include ["**/.*.sw?", "pkg/*", ".config", "doc/*", "coverage/*"]
- RDOC_OPTS = [
- "--quiet",
- "--title", "Sequel: The Database Toolkit for Ruby",
- "--opname", "index.html",
- "--line-numbers",
- "--main", "README",
- "--inline-source"
- ]
+ RDOC_OPTS = ["--quiet", "--line-numbers", "--inline-source"]

  ##############################################################################
  # RDoc
@@ -29,15 +22,14 @@ Rake::RDocTask.new do |rdoc|
  rdoc.rdoc_dir = "doc/rdoc"
  rdoc.options += RDOC_OPTS
  rdoc.main = "README"
- rdoc.title = "Sequel: The Database Toolkit for Ruby"
- rdoc.rdoc_files.add ["README", "COPYING", "lib/sequel_core.rb", "lib/**/*.rb"]
+ rdoc.title = "Sequel: The Database Toolkit for Ruby: Core Library and Adapters"
+ rdoc.rdoc_files.add ["README", "COPYING", "lib/**/*.rb"]
  end

  ##############################################################################
  # Gem packaging
  ##############################################################################
  desc "Packages up Sequel."
- task :default => [:package]
  task :package => [:clean]

  spec = Gem::Specification.new do |s|
@@ -47,28 +39,22 @@ spec = Gem::Specification.new do |s|
  s.platform = Gem::Platform::RUBY
  s.has_rdoc = true
  s.extra_rdoc_files = ["README", "CHANGELOG", "COPYING"]
- s.rdoc_options += RDOC_OPTS +
- ["--exclude", "^(examples|extras)\/", "--exclude", "lib/sequel_core.rb"]
+ s.rdoc_options += RDOC_OPTS + ["--exclude", "^(examples|extras)\/"]
  s.summary = "The Database Toolkit for Ruby: Core Library and Adapters"
  s.description = s.summary
- s.author = "Aman Gupta"
- s.email = "themastermind1@gmail.com"
+ s.author = "Jeremy Evans"
+ s.email = "code@jeremyevans.net"
  s.homepage = "http://sequel.rubyforge.org"
  s.executables = ["sequel"]
  s.required_ruby_version = ">= 1.8.4"

  s.add_dependency("metaid")
- s.add_dependency("assistance", ">= 0.1")

  case RUBY_PLATFORM
  when /java/
  s.platform = "jruby"
  else
  s.platform = Gem::Platform::RUBY
- # ParseTree on win32 requires PT2.1.1 and RI3.6.6
- s.add_dependency("RubyInline", ">= 3.6.6")
- s.add_dependency("ParseTree", ">= 2.1.1")
- s.add_dependency("ruby2ruby")
  end

  s.files = %w(COPYING README Rakefile) + Dir.glob("{bin,doc,spec,lib}/**/*")
@@ -122,6 +108,7 @@ Spec::Rake::SpecTask.new("spec") do |t|
  end

  desc "Run specs without coverage"
+ task :default => [:spec_no_cov]
  Spec::Rake::SpecTask.new("spec_no_cov") do |t|
  t.spec_files = FileList["spec/*_spec.rb"]
  t.spec_opts = File.read("spec/spec.opts").split("\n")
@@ -133,6 +120,14 @@ Spec::Rake::SpecTask.new("spec_adapters") do |t|
  t.spec_opts = File.read("spec/spec.opts").split("\n")
  end

+ %w'postgres sqlite mysql informix oracle'.each do |adapter|
+ desc "Run #{adapter} specs without coverage"
+ Spec::Rake::SpecTask.new("spec_#{adapter}") do |t|
+ t.spec_files = "spec/adapters/#{adapter}_spec.rb"
+ t.spec_opts = File.read("spec/spec.opts").split("\n")
+ end
+ end
+
  desc "Run all specs with coverage"
  Spec::Rake::SpecTask.new("spec_all") do |t|
  t.spec_files = FileList["spec/*_spec.rb", "spec/adapters/*_spec.rb"]
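With the per-adapter spec tasks added above, a single adapter's specs can now be run on their own, e.g. rake spec_postgres or rake spec_sqlite (assuming the corresponding spec/adapters/*_spec.rb file and spec/spec.opts are present).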
data/lib/sequel_core.rb CHANGED
@@ -1,18 +1,17 @@
- gem "assistance", ">= 0.1"
-
  require "metaid"
- require "assistance"
  require "bigdecimal"
  require "bigdecimal/util"

  files = %w[
- core_ext core_sql array_keys exceptions pretty_table
- dataset migration model schema database worker
+ deprecated core_ext core_sql connection_pool exceptions pretty_table
+ dataset migration schema database worker object_graph
  ]
  dir = File.join(File.dirname(__FILE__), "sequel_core")
  files.each {|f| require(File.join(dir, f))}

  module Sequel #:nodoc:
+ Deprecation.deprecation_message_stream = STDERR
+ #Deprecation.print_tracebacks = true
  class << self
  # call-seq:
  #   Sequel::Database.connect(conn_string)
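The new deprecated.rb (listed in the files above) exposes where deprecation warnings are written. A small sketch, assuming any IO object is accepted as the stream, as the STDERR default above suggests:

  # route deprecation warnings to a log file instead of STDERR
  Sequel::Deprecation.deprecation_message_stream = File.new('sequel_deprecated.log', 'w')
  # Sequel::Deprecation.print_tracebacks = true   # per the commented-out toggle above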
@@ -31,32 +30,23 @@ module Sequel #:nodoc:
  def single_threaded=(value)
  Database.single_threaded = value
  end
-
- def method_missing(m, *args)
- c = Database.adapter_class(m)
- begin
- # three ways to invoke this:
- # 0 arguments: Sequel.dbi
- # 1 argument: Sequel.dbi(db_name)
- # more args: Sequel.dbi(db_name, opts)
- case args.size
- when 0
- opts = {}
- when 1
- opts = args[0].is_a?(Hash) ? args[0] : {:database => args[0]}
- else
- opts = args[1].merge(:database => args[0])
+
+ def self.def_adapter_method(*adapters)
+ adapters.each do |adapter|
+ define_method(adapter) do |*args|
+ raise(::Sequel::Error, "Wrong number of arguments, 0-2 arguments valid") if args.length > 2
+ opts = {:adapter=>adapter.to_sym}
+ opts[:database] = args.shift if args.length >= 1 && !(args[0].is_a?(Hash))
+ if Hash === (arg = args[0])
+ opts.merge!(arg)
+ elsif !arg.nil?
+ raise ::Sequel::Error, "Wrong format of arguments, either use (), (String), (Hash), or (String, Hash)"
+ end
+ ::Sequel::Database.connect(opts)
  end
- rescue
- raise Error::AdapterNotFound, "Unknown adapter (#{m})"
  end
- c.new(opts)
  end
- end
- end

- class Object
- def Sequel(*args)
- Sequel.connect(*args)
+ def_adapter_method(*Database::ADAPTERS)
  end
  end
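Per def_adapter_method above, each entry in Database::ADAPTERS gets a real Sequel.<adapter> method taking zero to two arguments. A minimal sketch (database names and options are placeholders; the relevant adapter libraries must be installed):

  DB = Sequel.sqlite                                                   # ()
  DB = Sequel.sqlite('blog.db')                                        # (String)
  DB = Sequel.postgres(:database => 'blog', :user => 'me')             # (Hash)
  DB = Sequel.postgres('blog', :user => 'me', :host => 'localhost')    # (String, Hash)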
data/lib/sequel_core/adapters/ado.rb CHANGED
@@ -21,9 +21,9 @@ module Sequel
  end

  def connect
- dbname = @opts[:database]
+ s = "driver=#{@opts[:driver] || 'SQL Server'};server=#{@opts[:host]};database=#{@opts[:database]}#{";uid=#{@opts[:user]};pwd=#{@opts[:password]}" if @opts[:user]}"
  handle = WIN32OLE.new('ADODB.Connection')
- handle.Open(dbname)
+ handle.Open(s)
  handle
  end

@@ -72,18 +72,6 @@ module Sequel
  end
  end

- def array_tuples_fetch_rows(sql, &block)
- @db.synchronize do
- s = @db.execute sql
-
- @columns = s.Fields.extend(Enumerable).map {|x| x.Name.to_sym}
-
- s.moveFirst
- s.getRows.transpose.each {|r| r.keys = @columns; yield r}
- end
- self
- end
-
  def insert(*values)
  @db.do insert_sql(*values)
  end
@@ -97,4 +85,4 @@ module Sequel
  end
  end
  end
- end
+ end
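Following the connection string built above, the ADO adapter is now configured with an options hash rather than a URI. A hypothetical sketch (server, database, and credentials are placeholders; :driver defaults to 'SQL Server'):

  DB = Sequel.ado(:host => 'sqlserver1', :database => 'mydb',
                  :user => 'dbuser', :password => 'secret')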
data/lib/sequel_core/adapters/dbi.rb CHANGED
@@ -90,19 +90,6 @@ module Sequel
  self
  end

- def array_tuples_fetch_rows(sql, &block)
- @db.synchronize do
- s = @db.execute sql
- begin
- @columns = s.column_names.map {|c| c.to_sym}
- s.fetch {|r| r.keys = @columns; yield r}
- ensure
- s.finish rescue nil
- end
- end
- self
- end
-
  def hash_row(stmt, row)
  @columns.inject({}) do |m, c|
  m[c] = row.shift
@@ -123,4 +110,4 @@ module Sequel
  end
  end
  end
- end
+ end
data/lib/sequel_core/adapters/informix.rb CHANGED
@@ -26,7 +26,7 @@ module Sequel
  # Returns number of rows affected
  def execute(sql)
  @logger.info(sql) if @logger
- @pool.hold {|c| c.do(sql)}
+ @pool.hold {|c| c.immediate(sql)}
  end
  alias_method :do, :execute

@@ -62,7 +62,7 @@ module Sequel
  @db.synchronize do
  @db.query(sql) do |cursor|
  begin
- cursor.open.each_hash {|r| block[r]}
+ cursor.open.each_hash(&block)
  ensure
  cursor.drop
  end
@@ -84,4 +84,4 @@ module Sequel
  end
  end
  end
- end
+ end
data/lib/sequel_core/adapters/jdbc.rb CHANGED
@@ -79,12 +79,12 @@ module Sequel
  meta = result.getMetaData
  column_count = meta.getColumnCount
  @columns = []
- column_count.times {|i| @columns << meta.getColumnName(i).to_sym}
+ column_count.times {|i| @columns << meta.getColumnName(i+1).to_sym}

  # get rows
  while result.next
  row = {}
- @columns.each_with_index {|v, i| row[v] = result.getObject(i)}
+ @columns.each_with_index {|v, i| row[v] = result.getObject(i+1)}
  yield row
  end
  end
data/lib/sequel_core/adapters/mysql.rb CHANGED
@@ -154,11 +154,13 @@ module Sequel
  end

  def alter_table_sql(table, op)
+ type = type_literal(op[:type])
+ type << '(255)' if type == 'varchar'
  case op[:op]
  when :rename_column
- "ALTER TABLE #{table} CHANGE COLUMN #{literal(op[:name])} #{literal(op[:new_name])} #{type_literal(op[:type])}"
+ "ALTER TABLE #{table} CHANGE COLUMN #{literal(op[:name])} #{literal(op[:new_name])} #{type}"
  when :set_column_type
- "ALTER TABLE #{table} CHANGE COLUMN #{literal(op[:name])} #{literal(op[:name])} #{type_literal(op[:type])}"
+ "ALTER TABLE #{table} CHANGE COLUMN #{literal(op[:name])} #{literal(op[:name])} #{type}"
  when :drop_index
  "DROP INDEX #{default_index_name(table, op[:columns])} ON #{table}"
  else
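The index_definition_sql hunk just below adds index[:type] handling (:full_text, :spatial, nil, or any other value emitted via USING). A hypothetical schema sketch, assuming the updated schema generator (schema/generator.rb in the file list) passes :type through to the adapter:

  DB.create_table(:points) do
    column :name, :text
    column :geom, :point
    index :geom, :type => :spatial
    index :name, :type => :full_text
  end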
@@ -191,12 +193,16 @@ module Sequel

  def index_definition_sql(table_name, index)
  index_name = index[:name] || default_index_name(table_name, index[:columns])
- if index[:full_text]
+ unique = "UNIQUE " if index[:unique]
+ case index[:type]
+ when :full_text
  "CREATE FULLTEXT INDEX #{index_name} ON #{table_name} (#{literal(index[:columns])})"
- elsif index[:unique]
- "CREATE UNIQUE INDEX #{index_name} ON #{table_name} (#{literal(index[:columns])})"
+ when :spatial
+ "CREATE SPATIAL INDEX #{index_name} ON #{table_name} (#{literal(index[:columns])})"
+ when nil
+ "CREATE #{unique}INDEX #{index_name} ON #{table_name} (#{literal(index[:columns])})"
  else
- "CREATE INDEX #{index_name} ON #{table_name} (#{literal(index[:columns])})"
+ "CREATE #{unique}INDEX #{index_name} ON #{table_name} (#{literal(index[:columns])}) USING #{index[:type]}"
  end
  end

@@ -290,26 +296,22 @@ module Sequel
  # @ds.join_expr(:natural_left_outer, :nodes)
  # # 'NATURAL LEFT OUTER JOIN nodes'
  #
- def join_expr(type, table, expr = nil)
- join_type = JOIN_TYPES[type || :inner]
- unless join_type
- raise Error::InvalidJoinType, "Invalid join type: #{type}"
- end
- @opts[:server_version] = @db.server_version unless @opts[:server_version]
+ def join_expr(type, table, expr = nil, options = {})
+ raise Error::InvalidJoinType, "Invalid join type: #{type}" unless join_type = JOIN_TYPES[type || :inner]
+
+ server_version = @opts[:server_version] ||= @db.server_version
  type = :inner if type == :cross && !expr.nil?
- if type.to_s =~ /natural|cross|straight/ && @opts[:server_version] >= 50014
- tbl_factor = table.to_s
- if table.is_a?(Dataset)
- tbl_factor = table.sql << " AS t1"
- end
- if table.is_a?(Array)
- tbl_factor = "( #{literal(table)} )"
- end
- join_string = "#{join_type} " << tbl_factor
- return join_string
+
+ if (server_version >= 50014) && /\Anatural|cross|straight\z/.match(type.to_s)
+ table = "( #{literal(table)} )" if table.is_a?(Array)
+ "#{join_type} #{table}"
+ else
+ super
  end
+ end

- super
+ def insert_default_values_sql
+ "INSERT INTO #{source_list(@opts[:from])} () VALUES ()"
  end

  def match_expr(l, r)
@@ -414,8 +416,9 @@ module Sequel
  end

  def replace_sql(*values)
+ from = source_list(@opts[:from])
  if values.empty?
- "REPLACE INTO #{@opts[:from]} DEFAULT VALUES"
+ "REPLACE INTO #{from} DEFAULT VALUES"
  else
  values = values[0] if values.size == 1

@@ -425,30 +428,32 @@ module Sequel
  end

  case values
- when Sequel::Model
- insert_sql(values.values)
  when Array
  if values.empty?
- "REPLACE INTO #{@opts[:from]} DEFAULT VALUES"
+ "REPLACE INTO #{from} DEFAULT VALUES"
  elsif values.keys
  fl = values.keys.map {|f| literal(f.is_a?(String) ? f.to_sym : f)}
  vl = values.values.map {|v| literal(v)}
- "REPLACE INTO #{@opts[:from]} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})"
+ "REPLACE INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})"
  else
- "REPLACE INTO #{@opts[:from]} VALUES (#{literal(values)})"
+ "REPLACE INTO #{from} VALUES (#{literal(values)})"
  end
  when Hash
  if values.empty?
- "REPLACE INTO #{@opts[:from]} DEFAULT VALUES"
+ "REPLACE INTO #{from} DEFAULT VALUES"
  else
  fl, vl = [], []
  values.each {|k, v| fl << literal(k.is_a?(String) ? k.to_sym : k); vl << literal(v)}
- "REPLACE INTO #{@opts[:from]} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})"
+ "REPLACE INTO #{from} (#{fl.join(COMMA_SEPARATOR)}) VALUES (#{vl.join(COMMA_SEPARATOR)})"
  end
  when Dataset
- "REPLACE INTO #{@opts[:from]} #{literal(values)}"
+ "REPLACE INTO #{from} #{literal(values)}"
  else
- "REPLACE INTO #{@opts[:from]} VALUES (#{literal(values)})"
+ if values.respond_to?(:values)
+ replace_sql(values.values)
+ else
+ "REPLACE INTO #{from} VALUES (#{literal(values)})"
+ end
  end
  end
  end
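With the source now built via source_list, the Hash branch above produces SQL along these lines (identifier quoting and hash ordering can vary, so treat the output as approximate):

  DB[:items].replace_sql(:id => 1, :name => 'abc')
  # roughly: "REPLACE INTO items (id, name) VALUES (1, 'abc')"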
@@ -492,35 +497,10 @@ module Sequel
  self
  end

- def array_tuples_fetch_rows(sql, &block)
- @db.execute_select(sql) do |r|
- @columns = r.columns
- r.each_array(&block)
- end
- self
- end
-
- def array_tuples_transform_load(r)
- a = []; a.keys = []
- r.each_pair do |k, v|
- a[k] = (tt = @transform[k]) ? tt[0][v] : v
- end
- a
- end
-
- # Applies the value transform for data saved to the database.
- def array_tuples_transform_save(r)
- a = []; a.keys = []
- r.each_pair do |k, v|
- a[k] = (tt = @transform[k]) ? tt[1][v] : v
- end
- a
- end
-
  def multi_insert_sql(columns, values)
  columns = literal(columns)
  values = values.map {|r| "(#{literal(r)})"}.join(COMMA_SEPARATOR)
- ["INSERT INTO #{@opts[:from]} (#{columns}) VALUES #{values}"]
+ ["INSERT INTO #{source_list(@opts[:from])} (#{columns}) VALUES #{values}"]
  end
  end
  end
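Similarly, multi_insert_sql now runs its FROM clause through source_list. A rough sketch of the result (hypothetical items table; adapter-specific column quoting omitted):

  DB[:items].multi_insert_sql([:x, :y], [[1, 2], [3, 4]])
  # roughly: ["INSERT INTO items (x, y) VALUES (1, 2), (3, 4)"]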