akitaonrails-activerecord-sqlserver-adapter 1.1.0
Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,855 @@
|
|
1
|
+
require 'active_record/connection_adapters/abstract_adapter'
|
2
|
+
|
3
|
+
require 'bigdecimal'
|
4
|
+
require 'bigdecimal/util'
|
5
|
+
|
6
|
+
# sqlserver_adapter.rb -- ActiveRecord adapter for Microsoft SQL Server
|
7
|
+
#
|
8
|
+
# Author: Joey Gibson <joey@joeygibson.com>
|
9
|
+
# Date: 10/14/2004
|
10
|
+
#
|
11
|
+
# Modifications: DeLynn Berry <delynnb@megastarfinancial.com>
|
12
|
+
# Date: 3/22/2005
|
13
|
+
#
|
14
|
+
# Modifications (ODBC): Mark Imbriaco <mark.imbriaco@pobox.com>
|
15
|
+
# Date: 6/26/2005
|
16
|
+
|
17
|
+
# Modifications (Migrations): Tom Ward <tom@popdog.net>
|
18
|
+
# Date: 27/10/2005
|
19
|
+
#
|
20
|
+
# Modifications (Numerous fixes as maintainer): Ryan Tomayko <rtomayko@gmail.com>
|
21
|
+
# Date: Up to July 2006
|
22
|
+
|
23
|
+
# Current maintainer: Tom Ward <tom@popdog.net>
|
24
|
+
|
25
|
+
module ActiveRecord
|
26
|
+
class Base
|
27
|
+
def self.sqlserver_connection(config) #:nodoc:
|
28
|
+
require_library_or_gem 'dbi' unless self.class.const_defined?(:DBI)
|
29
|
+
|
30
|
+
config = config.symbolize_keys
|
31
|
+
|
32
|
+
mode = config[:mode] ? config[:mode].to_s.upcase : 'ADO'
|
33
|
+
username = config[:username] ? config[:username].to_s : 'sa'
|
34
|
+
password = config[:password] ? config[:password].to_s : ''
|
35
|
+
autocommit = config.key?(:autocommit) ? config[:autocommit] : true
|
36
|
+
if mode == "ODBC"
|
37
|
+
raise ArgumentError, "Missing DSN. Argument ':dsn' must be set in order for this adapter to work." unless config.has_key?(:dsn)
|
38
|
+
dsn = config[:dsn]
|
39
|
+
driver_url = "DBI:ODBC:#{dsn}"
|
40
|
+
else
|
41
|
+
raise ArgumentError, "Missing Database. Argument ':database' must be set in order for this adapter to work." unless config.has_key?(:database)
|
42
|
+
database = config[:database]
|
43
|
+
host = config[:host] ? config[:host].to_s : 'localhost'
|
44
|
+
unless config[:trusted_connection]
|
45
|
+
driver_url = "DBI:ADO:Provider=SQLOLEDB;Data Source=#{host};Initial Catalog=#{database};User Id=#{username};Password=#{password};"
|
46
|
+
else
|
47
|
+
driver_url = "DBI:ADO:Provider=SQLOLEDB;Data Source=#{host};Initial Catalog=#{database};Trusted_Connection=Yes;"
|
48
|
+
end
|
49
|
+
end
|
50
|
+
conn = DBI.connect(driver_url, username, password)
|
51
|
+
conn["AutoCommit"] = autocommit
|
52
|
+
ConnectionAdapters::SQLServerAdapter.new(conn, logger, [driver_url, username, password])
|
53
|
+
end
|
54
|
+
|
55
|
+
# Overridden to include support for SQL server's lack of = operator on
|
56
|
+
# text/ntext/image columns LIKE operator is used instead
|
57
|
+
    # Overridden to include support for SQL Server's lack of an = operator on
    # text/ntext/image columns; the LIKE operator is used instead for those
    # "special" column types (see SQLServerColumn#is_special).
    def self.sanitize_sql_hash(attrs)
      conditions = attrs.map do |attr, value|
        # Look up the column so we know whether it needs LIKE instead of =.
        col = self.columns.find {|c| c.name == attr}
        if col && col.respond_to?("is_special") && col.is_special
          "#{table_name}.#{connection.quote_column_name(attr)} LIKE ?"
        else
          # attribute_condition picks =, IN, IS NULL, BETWEEN, etc.
          "#{table_name}.#{connection.quote_column_name(attr)} #{attribute_condition(value)}"
        end
      end.join(' AND ')
      replace_bind_variables(conditions, expand_range_bind_variables(attrs.values))
    end
|
68
|
+
|
69
|
+
# In the case of SQL server, the lock value must follow the FROM clause
|
70
|
+
    # In the case of SQL Server, the lock hint must follow the FROM clause,
    # so add_lock! is invoked immediately after FROM when running against
    # this adapter; other adapters keep the default trailing position.
    def self.construct_finder_sql(options)
      scope = scope(:find)
      sql = "SELECT #{(scope && scope[:select]) || options[:select] || '*'} "
      sql << "FROM #{(scope && scope[:from]) || options[:from] || table_name} "

      if ActiveRecord::Base.connection.adapter_name == "SQLServer" && !options[:lock].blank? # SQLServer
        add_lock!(sql, options, scope)
      end

      add_joins!(sql, options, scope)
      add_conditions!(sql, options[:conditions], scope)

      sql << " GROUP BY #{options[:group]} " if options[:group]

      add_order!(sql, options[:order], scope)
      add_limit!(sql, options, scope)
      # Non-SQLServer adapters get the lock clause in its usual final position.
      add_lock!(sql, options, scope) unless ActiveRecord::Base.connection.adapter_name == "SQLServer" # SQLServer
      # $log.debug "database_helper: construct_finder_sql: sql at end: #{sql.inspect}"
      sql
    end
|
90
|
+
end # class Base
|
91
|
+
|
92
|
+
module ConnectionAdapters
|
93
|
+
class SQLServerColumn < Column# :nodoc:
|
94
|
+
attr_reader :identity, :is_special
|
95
|
+
|
96
|
+
      # Builds a column description. +identity+ marks IDENTITY columns, and
      # text/ntext/image columns are flagged "special" because SQL Server
      # cannot compare them with '=' (LIKE is used instead).
      def initialize(name, default, sql_type = nil, identity = false, null = true) # TODO: check ok to remove scale_value = 0
        super(name, default, sql_type, null)
        @identity = identity
        @is_special = sql_type =~ /text|ntext|image/i
        # TODO: check ok to remove @scale = scale_value
        # SQL Server only supports limits on *char and float types
        @limit = nil unless @type == :string
      end

      # Maps SQL Server-specific column types onto generic Rails types;
      # anything unrecognized falls through to the standard mapping.
      def simplified_type(field_type)
        case field_type
        when /real/i             then :float
        when /money/i            then :decimal
        when /image/i            then :binary
        when /bit/i              then :boolean
        when /uniqueidentifier/i then :string
        else super
        end
      end

      # Casts a raw DB value into a Ruby value for this column's type.
      # Dates are widened to datetimes because SQL Server (pre-2008) has no
      # separate DATE type.
      def type_cast(value)
        return nil if value.nil?
        case type
        when :datetime  then cast_to_datetime(value)
        when :timestamp then cast_to_time(value)
        when :time      then cast_to_time(value)
        when :date      then cast_to_datetime(value)
        # Accepts true, "t"/"true" (case-insensitive) or "1" as true.
        when :boolean   then value == true or (value =~ /^t(rue)?$/i) == 0 or value.to_s == '1'
        else super
        end
      end

      # Parses +value+ into a Time in the application's default timezone;
      # returns nil when the string cannot be parsed.
      def cast_to_time(value)
        return value if value.is_a?(Time)
        time_array = ParseDate.parsedate(value)
        Time.send(Base.default_timezone, *time_array) rescue nil
      end

      # Normalizes the many datetime representations DBI can hand back
      # (DBI::Timestamp, Time, DateTime, Date, String) into a Time.
      # Times with a zeroed date part are re-anchored at 2000-01-01.
      def cast_to_datetime(value)
        return value.to_time if value.is_a?(DBI::Timestamp)

        if value.is_a?(Time)
          if value.year != 0 and value.month != 0 and value.day != 0
            return value
          else
            return Time.mktime(2000, 1, 1, value.hour, value.min, value.sec) rescue nil
          end
        end

        if value.is_a?(DateTime)
          return Time.mktime(value.year, value.mon, value.day, value.hour, value.min, value.sec)
        end

        return cast_to_time(value) if value.is_a?(Date) or value.is_a?(String) rescue nil
        value
      end

      # TODO: Find less hack way to convert DateTime objects into Times

      # Converts a DateTime to a local Time; all other inputs use the
      # default Column behavior.
      def self.string_to_time(value)
        if value.is_a?(DateTime)
          return Time.mktime(value.year, value.mon, value.day, value.hour, value.min, value.sec)
        else
          super
        end
      end
|
162
|
+
|
163
|
+
# These methods will only allow the adapter to insert binary data with a length of 7K or less
|
164
|
+
# because of a SQL Server statement length policy.
|
165
|
+
# def self.string_to_binary(value)
|
166
|
+
# value.gsub(/(\r|\n|\0|\x1a)/) do
|
167
|
+
# case $1
|
168
|
+
# when "\r" then "%00"
|
169
|
+
# when "\n" then "%01"
|
170
|
+
# when "\0" then "%02"
|
171
|
+
# when "\x1a" then "%03"
|
172
|
+
# end
|
173
|
+
# end
|
174
|
+
# end
|
175
|
+
#
|
176
|
+
# def self.binary_to_string(value)
|
177
|
+
# value.gsub(/(%00|%01|%02|%03)/) do
|
178
|
+
# case $1
|
179
|
+
# when "%00" then "\r"
|
180
|
+
# when "%01" then "\n"
|
181
|
+
# when "%02\0" then "\0"
|
182
|
+
# when "%03" then "\x1a"
|
183
|
+
# end
|
184
|
+
# end
|
185
|
+
# end
|
186
|
+
# end
|
187
|
+
|
188
|
+
# These methods will only allow the adapter to insert binary data with a length of 7K or less
|
189
|
+
# because of a SQL Server statement length policy.
|
190
|
+
# Convert strings to hex before storing in the database
|
191
|
+
def self.string_to_binary(value)
|
192
|
+
"0x#{value.unpack("H*")[0]}"
|
193
|
+
end
|
194
|
+
|
195
|
+
def self.binary_to_string(value)
|
196
|
+
# TODO: Need to remove conditional pack (should always have to pack hex characters into blob)
|
197
|
+
# Assigning a value to a binary column causes the string_to_binary to hexify it
|
198
|
+
# This hex value is stored in the DB but the original value is retained in the
|
199
|
+
# cache. By forcing reload, the value coming into binary_to_string will always
|
200
|
+
# be hex. Need to force reload or update the cached column's value to match what is sent to the DB.
|
201
|
+
value =~ /[^[:xdigit:]]/ ? value : [value].pack('H*')
|
202
|
+
end
|
203
|
+
end
|
204
|
+
|
205
|
+
# In ADO mode, this adapter will ONLY work on Windows systems,
|
206
|
+
# since it relies on Win32OLE, which, to my knowledge, is only
|
207
|
+
# available on Windows.
|
208
|
+
#
|
209
|
+
# This mode also relies on the ADO support in the DBI module. If you are using the
|
210
|
+
# one-click installer of Ruby, then you already have DBI installed, but
|
211
|
+
# the ADO module is *NOT* installed. You will need to get the latest
|
212
|
+
# source distribution of Ruby-DBI from http://ruby-dbi.sourceforge.net/
|
213
|
+
# unzip it, and copy the file
|
214
|
+
# <tt>src/lib/dbd_ado/ADO.rb</tt>
|
215
|
+
# to
|
216
|
+
# <tt>X:/Ruby/lib/ruby/site_ruby/1.8/DBD/ADO/ADO.rb</tt>
|
217
|
+
# (you will more than likely need to create the ADO directory).
|
218
|
+
# Once you've installed that file, you are ready to go.
|
219
|
+
#
|
220
|
+
# In ODBC mode, the adapter requires the ODBC support in the DBI module which requires
|
221
|
+
# the Ruby ODBC module. Ruby ODBC 0.996 was used in development and testing,
|
222
|
+
# and it is available at http://www.ch-werner.de/rubyodbc/
|
223
|
+
#
|
224
|
+
# Options:
|
225
|
+
#
|
226
|
+
# * <tt>:mode</tt> -- ADO or ODBC. Defaults to ADO.
|
227
|
+
# * <tt>:username</tt> -- Defaults to sa.
|
228
|
+
# * <tt>:password</tt> -- Defaults to empty string.
|
229
|
+
# * <tt>:windows_auth</tt> -- Defaults to "User ID=#{username};Password=#{password}"
|
230
|
+
#
|
231
|
+
# ADO specific options:
|
232
|
+
#
|
233
|
+
# * <tt>:host</tt> -- Defaults to localhost.
|
234
|
+
# * <tt>:database</tt> -- The name of the database. No default, must be provided.
|
235
|
+
# * <tt>:windows_auth</tt> -- Use windows authentication instead of username/password.
|
236
|
+
#
|
237
|
+
# ODBC specific options:
|
238
|
+
#
|
239
|
+
# * <tt>:dsn</tt> -- Defaults to nothing.
|
240
|
+
#
|
241
|
+
# ADO code tested on Windows 2000 and higher systems,
|
242
|
+
# running ruby 1.8.2 (2004-07-29) [i386-mswin32], and SQL Server 2000 SP3.
|
243
|
+
#
|
244
|
+
# ODBC code tested on a Fedora Core 4 system, running FreeTDS 0.63,
|
245
|
+
# unixODBC 2.2.11, Ruby ODBC 0.996, Ruby DBI 0.0.23 and Ruby 1.8.2.
|
246
|
+
# [Linux strongmad 2.6.11-1.1369_FC4 #1 Thu Jun 2 22:55:56 EDT 2005 i686 i686 i386 GNU/Linux]
|
247
|
+
class SQLServerAdapter < AbstractAdapter
|
248
|
+
|
249
|
+
      # add synchronization to adapter to prevent 'invalid cursor state' error
      require 'sync'
      # +connection+ is a live DBI handle; +connection_options+ keeps the
      # [driver_url, username, password] triple so reconnect! can re-dial.
      def initialize(connection, logger, connection_options=nil)
        super(connection, logger)
        @connection_options = connection_options
        # Serializes all access to the single shared DBI handle.
        @sql_connection_lock = Sync.new
      end
|
256
|
+
|
257
|
+
def native_database_types
|
258
|
+
{
|
259
|
+
:primary_key => "int NOT NULL IDENTITY(1, 1) PRIMARY KEY",
|
260
|
+
:string => { :name => "varchar", :limit => 255 },
|
261
|
+
:text => { :name => "varchar(max)" },
|
262
|
+
:integer => { :name => "int" },
|
263
|
+
:float => { :name => "float", :limit => 8 },
|
264
|
+
:decimal => { :name => "decimal" },
|
265
|
+
:datetime => { :name => "datetime" },
|
266
|
+
:timestamp => { :name => "datetime" },
|
267
|
+
:time => { :name => "datetime" },
|
268
|
+
:date => { :name => "datetime" },
|
269
|
+
:binary => { :name => "varbinary(max)"},
|
270
|
+
:boolean => { :name => "bit"}
|
271
|
+
}
|
272
|
+
end
|
273
|
+
|
274
|
+
def adapter_name
|
275
|
+
'SQLServer'
|
276
|
+
end
|
277
|
+
|
278
|
+
def supports_migrations? #:nodoc:
|
279
|
+
true
|
280
|
+
end
|
281
|
+
|
282
|
+
def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
|
283
|
+
return super unless type.to_s == 'integer'
|
284
|
+
|
285
|
+
if limit.nil? || limit == 4
|
286
|
+
'integer'
|
287
|
+
elsif limit < 4
|
288
|
+
'smallint'
|
289
|
+
else
|
290
|
+
'bigint'
|
291
|
+
end
|
292
|
+
end
|
293
|
+
|
294
|
+
# Returns a table's primary key and belonging sequence (not applicable to SQL server).
|
295
|
+
      # Returns a table's primary key and belonging sequence (sequences are
      # not applicable to SQL Server, so the second element is always nil).
      # AutoCommit is disabled while sp_helpindex runs because the procedure
      # opens a cursor; the ensure block always restores it.
      def pk_and_sequence_for(table_name)
        @connection["AutoCommit"] = false
        keys = []
        execute("EXEC sp_helpindex '#{table_name}'") do |handle|
          if handle.column_info.any?
            # index rows look like [name, description, keys]; the primary key
            # index is identified by its description text.
            pk_index = handle.detect {|index| index[1] =~ /primary key/ }
            keys << pk_index[2] if pk_index
          end
        end
        # Only a single-column primary key is reported; composite keys => nil.
        keys.length == 1 ? [keys.first, nil] : nil
      ensure
        @connection["AutoCommit"] = true
      end
|
308
|
+
|
309
|
+
# allows to set owner in table name with dot notation
|
310
|
+
def quote_table_name(name)
|
311
|
+
names = name.split('.')
|
312
|
+
names.size == 1 ? "[#{names[0]}]" : "[#{names[0]}].[#{names[1]}]"
|
313
|
+
end
|
314
|
+
|
315
|
+
# CONNECTION MANAGEMENT ====================================#
|
316
|
+
|
317
|
+
      # Returns true if the connection is active.
      # Probes with a throwaway statement; DBI errors mean the handle is dead.
      def active?
        @connection.execute("SELECT 1").finish
        true
      rescue DBI::DatabaseError, DBI::InterfaceError
        false
      end

      # Reconnects to the database, returns false if no connection could be made.
      def reconnect!
        disconnect!
        # @connection_options is [driver_url, username, password] from initialize.
        @connection = DBI.connect(*@connection_options)
      rescue DBI::DatabaseError => e
        @logger.warn "#{adapter_name} reconnection failed: #{e.message}" if @logger
        false
      end
|
333
|
+
|
334
|
+
# Disconnects from the database
|
335
|
+
|
336
|
+
def disconnect!
|
337
|
+
@sql_connection_lock.synchronize(:EX) do
|
338
|
+
begin
|
339
|
+
@connection.disconnect
|
340
|
+
rescue nil
|
341
|
+
end
|
342
|
+
end
|
343
|
+
end
|
344
|
+
|
345
|
+
# Add synchronization for the db connection to ensure no one else is using this one
|
346
|
+
# prevents 'invalid cursor state' error
|
347
|
+
      # Returns the result rows of +sql+ as an array of arrays.
      # Access to the shared DBI handle is serialized to prevent the
      # 'invalid cursor state' error; DBI::Timestamp cells are converted to
      # Time on the way out.
      def select_rows(sql, name = nil)
        rows = []
        repair_special_columns(sql)
        log(sql, name) do
          @sql_connection_lock.synchronize(:EX) do
            @connection.select_all(sql) do |row|
              record = []
              row.each do |col|
                if col.is_a? DBI::Timestamp
                  record << col.to_time
                else
                  record << col
                end
              end
              rows << record
            end
          end
        end
        rows
      end
|
367
|
+
|
368
|
+
      # Returns the column metadata for +table_name+ as SQLServerColumn
      # objects, querying the sys.* catalog views directly. Accepts
      # "owner.table" and bracketed names; only the bare table part is used
      # in the catalog lookup. Returns [] for a blank name.
      def columns(table_name, name = nil)
        return [] if table_name.blank?
        table_name = table_name.to_s if table_name.is_a?(Symbol)
        table_name = table_name.split('.')[-1] unless table_name.nil?
        table_name = table_name.gsub(/[\[\]]/, '')
        # nchar/nvarchar lengths are stored in bytes, so they are halved to
        # report character counts; max_length == -1 means a (max) type.
        sql = %Q{
          SELECT
          clmns.name AS ColName,
          object_definition(clmns.default_object_id) as DefaultValue,
          CAST(clmns.scale AS int) AS numeric_scale,
          CAST(clmns.precision AS int) AS numeric_precision,
          usrt.name AS ColType,
          case clmns.is_nullable when 0 then 'NO' else 'YES' end AS IsNullable,
          CAST(CASE WHEN baset.name IN (N'nchar', N'nvarchar') AND clmns.max_length <> -1 THEN
          clmns.max_length/2 ELSE clmns.max_length END AS int) AS Length,
          clmns.is_identity as IsIdentity
          FROM
          sys.tables AS tbl
          INNER JOIN sys.all_columns AS clmns ON clmns.object_id=tbl.object_id
          LEFT OUTER JOIN sys.types AS usrt ON usrt.user_type_id = clmns.user_type_id
          LEFT OUTER JOIN sys.types AS baset ON baset.user_type_id = clmns.system_type_id and
          baset.user_type_id = baset.system_type_id
          WHERE
          (tbl.name=N'#{table_name}' )
          ORDER BY
          clmns.column_id ASC
        }
        # Comment out if you want to have the Columns select statment logged.
        # Personally, I think it adds unnecessary bloat to the log.
        # If you do comment it out, make sure to un-comment the "result" line that follows
        result = log(sql, name) do
          @sql_connection_lock.synchronize(:EX) do
            @connection.select_all(sql)
          end
        end
        #result = @connection.select_all(sql)
        columns = []
        result.each do |field|
          # A default of (NULL)/('NULL') means "no default". NOTE(review):
          # gsub! returns nil when nothing was stripped, in which case the
          # raw DefaultValue is kept -- existing (subtle) behavior.
          default = field[:DefaultValue].to_s.gsub!(/[()\']/,"") =~ /null|NULL/ ? nil : field[:DefaultValue]
          if field[:ColType] =~ /numeric|decimal/i
            type = "#{field[:ColType]}(#{field[:numeric_precision]},#{field[:numeric_scale]})"
          else
            type = "#{field[:ColType]}(#{field[:Length]})"
          end
          is_identity = field[:IsIdentity] == 1
          is_nullable = field[:IsNullable] == 'YES'
          columns << SQLServerColumn.new(field[:ColName], default, type, is_identity, is_nullable)
        end
        columns
      end
|
418
|
+
|
419
|
+
      # Runs an INSERT and returns the new id: the explicit +id_value+ if
      # one was given, otherwise SCOPE_IDENTITY() from this connection.
      def insert(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
        execute(sql, name)
        id_value || select_one("SELECT scope_identity() AS Ident")["Ident"]
      end

      # Runs an UPDATE (or DELETE, via the alias below) and returns the
      # affected row count, falling back to @@ROWCOUNT when the statement
      # handle doesn't report one.
      def update(sql, name = nil)
        execute(sql, name) do |handle|
          handle.rows
        end || select_one("SELECT @@ROWCOUNT AS AffectedRows")["AffectedRows"]
      end

      alias_method :delete, :update
|
431
|
+
|
432
|
+
# override execute to synchronize the connection
|
433
|
+
      # Executes +sql+ under the connection lock (prevents 'invalid cursor
      # state'), yielding the DBI statement handle when a block is given.
      # INSERTs that explicitly set an identity column are wrapped in
      # SET IDENTITY_INSERT ON/OFF for their target table.
      def execute(sql, name = nil)
        if sql =~ /^\s*INSERT/i && (table_name = query_requires_identity_insert?(sql))
          log(sql, name) do
            with_identity_insert_enabled(table_name) do
              @sql_connection_lock.synchronize(:EX) do
                @connection.execute(sql) do |handle|
                  yield(handle) if block_given?
                end
              end
            end
          end
        else
          log(sql, name) do
            @sql_connection_lock.synchronize(:EX) do
              @connection.execute(sql) do |handle|
                yield(handle) if block_given?
              end
            end
          end
        end
      end
|
454
|
+
|
455
|
+
# Add synchronization for the db connection to ensure no one else is using this one
|
456
|
+
# prevents 'Could not change transaction status' error
|
457
|
+
      # Starts a transaction by switching AutoCommit off; if that fails the
      # handle is forced back to autocommit so the connection stays usable.
      # The connection lock prevents the 'Could not change transaction
      # status' error.
      def begin_db_transaction
        @sql_connection_lock.synchronize(:EX) do
          begin
            @connection["AutoCommit"] = false
          rescue Exception => e
            # NOTE(review): rescuing Exception is very broad -- kept as-is to
            # preserve the original best-effort behavior.
            @connection["AutoCommit"] = true
          end
        end
      end
      # Commits the open transaction; AutoCommit is always restored.
      def commit_db_transaction
        @sql_connection_lock.synchronize(:EX) do
          begin
            @connection.commit
          ensure
            @connection["AutoCommit"] = true
          end
        end
      end

      # Rolls back the open transaction; AutoCommit is always restored.
      def rollback_db_transaction
        @sql_connection_lock.synchronize(:EX) do
          begin
            @connection.rollback
          ensure
            @connection["AutoCommit"] = true
          end
        end
      end
|
485
|
+
|
486
|
+
      # Quotes +value+ for interpolation into SQL. Booleans become bit
      # literals and time/date values use SQL Server's unseparated YYYYMMDD
      # format; everything else defers to the default quoting.
      def quote(value, column = nil)
        # Records are quoted as their primary key.
        return value.quoted_id if value.respond_to?(:quoted_id)

        case value
        when TrueClass then '1'
        when FalseClass then '0'
        else
          if value.acts_like?(:time)
            "'#{value.strftime("%Y%m%d %H:%M:%S")}'"
          elsif value.acts_like?(:date)
            "'#{value.strftime("%Y%m%d")}'"
          else
            super
          end
        end
      end
|
502
|
+
|
503
|
+
def quote_string(string)
|
504
|
+
string.gsub(/\'/, "''")
|
505
|
+
end
|
506
|
+
|
507
|
+
def quoted_true
|
508
|
+
"1"
|
509
|
+
end
|
510
|
+
|
511
|
+
def quoted_false
|
512
|
+
"0"
|
513
|
+
end
|
514
|
+
|
515
|
+
def quote_column_name(name)
|
516
|
+
"[#{name}]"
|
517
|
+
end
|
518
|
+
|
519
|
+
      # Emulates LIMIT/OFFSET, which this SQL Server dialect lacks, by
      # nesting TOP queries: the inner query takes limit+offset rows in the
      # requested order, the middle query takes the last +limit+ of those in
      # reversed order, and the outer query restores the original order.
      def add_limit_offset!(sql, options)
        if options[:limit] and options[:offset]
          # Clamp the window to the actual row count so the reversed TOP
          # trick doesn't pull rows from outside the requested page.
          total_rows = @connection.select_all("SELECT count(*) as TotalRows from (#{sql.gsub(/\bSELECT(\s+DISTINCT)?\b/i, "SELECT#{$1} TOP 1000000000")}) tally")[0][:TotalRows].to_i
          if (options[:limit] + options[:offset]) >= total_rows
            options[:limit] = (total_rows - options[:offset] >= 0) ? (total_rows - options[:offset]) : 0
          end
          sql.sub!(/^\s*SELECT(\s+DISTINCT)?/i, "SELECT * FROM (SELECT TOP #{options[:limit]} * FROM (SELECT#{$1} TOP #{options[:limit] + options[:offset]} ")
          sql << ") AS tmp1"
          if options[:order]
            # Rewrite each ORDER BY fragment to reference the aliased column
            # names (tN_rM) visible in the outer queries.
            options[:order] = options[:order].split(',').map do |field|
              parts = field.split(" ")
              tc = parts[0]
              if sql =~ /\.\[/ and tc =~ /\./ # if column quoting used in query
                tc.gsub!(/\./, '\\.\\[')
                tc << '\\]'
              end
              if sql =~ /#{tc} AS (t\d_r\d\d?)/
                parts[0] = $1
              elsif parts[0] =~ /\w+\.(\w+)/
                parts[0] = $1
              end
              parts.join(' ')
            end.join(', ')
            sql << " ORDER BY #{change_order_direction(options[:order])}) AS tmp2 ORDER BY #{options[:order]}"
          else
            sql << " ) AS tmp2"
          end
        elsif sql !~ /^\s*SELECT (@@|COUNT\()/i
          # Plain LIMIT (no offset): a single TOP clause suffices.
          sql.sub!(/^\s*SELECT(\s+DISTINCT)?/i) do
            "SELECT#{$1} TOP #{options[:limit]}"
          end unless options[:limit].nil?
        end
      end
|
552
|
+
|
553
|
+
      # Drops and recreates the named database.
      def recreate_database(name)
        drop_database(name)
        create_database(name)
      end

      def drop_database(name)
        execute "DROP DATABASE #{name}"
      end

      def create_database(name)
        execute "CREATE DATABASE #{name}"
      end

      # Name of the database this connection is currently attached to.
      def current_database
        @connection.select_one("select DB_NAME()")[0]
      end
|
569
|
+
|
570
|
+
      # Lists user tables via INFORMATION_SCHEMA, excluding the legacy
      # 'dtproperties' table that SQL Server's designer tools create.
      def tables(name = nil)
        execute("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE'", name) do |sth|
          sth.inject([]) do |tables, field|
            table_name = field[0]
            tables << table_name unless table_name == 'dtproperties'
            tables
          end
        end
      end
|
579
|
+
|
580
|
+
def indexes(table_name, name = nil)
|
581
|
+
ActiveRecord::Base.connection.instance_variable_get("@connection")["AutoCommit"] = false
|
582
|
+
indexes = []
|
583
|
+
execute("EXEC sp_helpindex '#{table_name}'", name) do |sth|
|
584
|
+
sth.each do |index|
|
585
|
+
unique = index[1] =~ /unique/
|
586
|
+
primary = index[1] =~ /primary key/
|
587
|
+
if !primary
|
588
|
+
indexes << IndexDefinition.new(table_name, index[0], unique, index[2].split(", "))
|
589
|
+
end
|
590
|
+
end
|
591
|
+
end
|
592
|
+
indexes
|
593
|
+
ensure
|
594
|
+
ActiveRecord::Base.connection.instance_variable_get("@connection")["AutoCommit"] = true
|
595
|
+
end
|
596
|
+
|
597
|
+
def add_order_by_for_association_limiting!(sql, options)
|
598
|
+
# Just skip ORDER BY clause. I dont know better solution for DISTINCT plus ORDER BY.
|
599
|
+
# And this doesnt cause to much problem..
|
600
|
+
return sql
|
601
|
+
end
|
602
|
+
|
603
|
+
      # Renames a table via the sp_rename system procedure.
      def rename_table(name, new_name)
        execute "EXEC sp_rename '#{name}', '#{new_name}'"
      end

      # Adds a new column to the named table.
      # See TableDefinition#column for details of the options you can use.
      def add_column(table_name, column_name, type, options = {})
        add_column_sql = "ALTER TABLE #{table_name} ADD #{quote_column_name(column_name)} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
        add_column_options!(add_column_sql, options)
        # TODO: Add support to mimic date columns, using constraints to mark them as such in the database
        # add_column_sql << " CONSTRAINT ck__#{table_name}__#{column_name}__date_only CHECK ( CONVERT(CHAR(12), #{quote_column_name(column_name)}, 14)='00:00:00:000' )" if type == :date
        execute(add_column_sql)
      end

      # Renames a column via sp_rename's 'table.column' form.
      def rename_column(table, column, new_column_name)
        execute "EXEC sp_rename '#{table}.#{column}', '#{new_column_name}'"
      end
|
620
|
+
|
621
|
+
# database_statements line 108 Set the SQL specific rowlocking
|
622
|
+
# was previously generating invalid syntax for SQL server
|
623
|
+
def add_lock!(sql, options)
|
624
|
+
case lock = options[:lock]
|
625
|
+
when true then sql << "WITH(HOLDLOCK, ROWLOCK) "
|
626
|
+
when String then sql << "#{lock} "
|
627
|
+
end
|
628
|
+
end
|
629
|
+
|
630
|
+
# Delete the default options if it's nil. Adapter was adding default NULL contraints
|
631
|
+
# to all columns which caused problems when trying to alter the column
|
632
|
+
      # Deletes the :default option when it is nil: the adapter was adding
      # DEFAULT NULL constraints to all columns, which caused problems when
      # later trying to alter the column.
      def add_column_options!(sql, options) #:nodoc:
        options.delete(:default) if options[:default].nil?
        super
      end
|
636
|
+
|
637
|
+
# calculate column size to fix issue
|
638
|
+
# size XXXXX given to the column 'data' exceeds the maximum allowed for any data type (8000)
|
639
|
+
def column_total_size(table_name)
|
640
|
+
return nil if table_name.blank?
|
641
|
+
table_name = table_name.to_s if table_name.is_a?(Symbol)
|
642
|
+
table_name = table_name.split('.')[-1] unless table_name.nil?
|
643
|
+
table_name = table_name.gsub(/[\[\]]/, '')
|
644
|
+
sql = %Q{
|
645
|
+
SELECT SUM(COL_LENGTH(cols.TABLE_NAME, cols.COLUMN_NAME)) as Length
|
646
|
+
FROM INFORMATION_SCHEMA.COLUMNS cols
|
647
|
+
WHERE cols.TABLE_NAME = '#{table_name}'
|
648
|
+
}
|
649
|
+
# Comment out if you want to have the Columns select statment logged.
|
650
|
+
# Personally, I think it adds unnecessary bloat to the log. If you do
|
651
|
+
# comment it out, make sure to un-comment the "result" line that follows
|
652
|
+
result = log(sql, name) do
|
653
|
+
@sql_connection_lock.synchronize(:EX) { @connection.select_all(sql) }
|
654
|
+
end
|
655
|
+
field[:Length].to_i
|
656
|
+
end
|
657
|
+
|
658
|
+
# if binary, calculate te the remaining amount for size
|
659
|
+
# issue: size XXXXX given to the column 'data' exceeds the maximum allowed for any data type (8000)
|
660
|
+
def change_column(table_name, column_name, type, options = {}) #:nodoc:
|
661
|
+
# $log.debug "change_column"
|
662
|
+
sql_commands = []
|
663
|
+
|
664
|
+
# Handle conversion of text columns to binary columns by first
|
665
|
+
# converting to varchar. We determine the amount of space left for the
|
666
|
+
# columns so we can get the most out of the conversion.
|
667
|
+
if type == :binary
|
668
|
+
col = self.columns(table_name, column_name)
|
669
|
+
sql_commands << "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(:string, 8000 - column_total_size(table_name))}" if col && col.type == :text
|
670
|
+
end
|
671
|
+
|
672
|
+
sql_commands << "ALTER TABLE #{table_name} ALTER COLUMN #{column_name} #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}"
|
673
|
+
if options_include_default?(options)
|
674
|
+
remove_default_constraint(table_name, column_name)
|
675
|
+
sql_commands << "ALTER TABLE #{table_name} ADD CONSTRAINT DF_#{table_name}_#{column_name} DEFAULT #{quote(options[:default], options[:column])} FOR #{column_name}"
|
676
|
+
end
|
677
|
+
sql_commands.each {|c|
|
678
|
+
execute(c)
|
679
|
+
}
|
680
|
+
end
|
681
|
+
|
682
|
+
      # Drops a column after first removing any check and default constraints
      # that reference it (SQL Server refuses the DROP otherwise).
      def remove_column(table_name, column_name)
        remove_check_constraints(table_name, column_name)
        remove_default_constraint(table_name, column_name)
        execute "ALTER TABLE [#{table_name}] DROP COLUMN [#{column_name}]"
      end

      # Drops every default constraint bound to the given column, looked up
      # through the legacy sysobjects/syscolumns catalog.
      def remove_default_constraint(table_name, column_name)
        constraints = select "select def.name from sysobjects def, syscolumns col, sysobjects tab where col.cdefault = def.id and col.name = '#{column_name}' and tab.name = '#{table_name}' and col.id = tab.id"

        constraints.each do |constraint|
          execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["name"]}"
        end
      end

      # Drops every check constraint that references the given column.
      def remove_check_constraints(table_name, column_name)
        # TODO remove all constraints in single method
        constraints = select "SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE where TABLE_NAME = '#{table_name}' and COLUMN_NAME = '#{column_name}'"
        constraints.each do |constraint|
          execute "ALTER TABLE #{table_name} DROP CONSTRAINT #{constraint["CONSTRAINT_NAME"]}"
        end
      end

      # Drops an index using SQL Server's table.index syntax.
      def remove_index(table_name, options = {})
        execute "DROP INDEX #{table_name}.#{quote_column_name(index_name(table_name, options))}"
      end
|
707
|
+
|
708
|
+
private
|
709
|
+
        # Runs a SELECT and returns an array of row hashes. Queries tagged by
        # add_limit_offset! with a #limit_offset_temp marker are emulated by
        # materializing the page into a temp table, deleting the first
        # +offset+ rows via SET ROWCOUNT, and selecting the remainder.
        def select(sql, name = nil)
          repair_special_columns(sql)
          if match = query_has_limit_and_offset?(sql)
            matched, limit, offset = *match
            execute(sql)
            # SET ROWCOUNT n causes all statements to only affect n rows, which we use
            # to delete offset rows from the temporary table
            execute("SET ROWCOUNT #{offset}")
            execute("DELETE from #limit_offset_temp")
            execute("SET ROWCOUNT 0")
            result = execute_select("SELECT * FROM #limit_offset_temp")
            execute("DROP TABLE #limit_offset_temp")
            result
          else
            execute_select(sql)
          end
        end
|
726
|
+
|
727
|
+
        # Executes +sql+ and builds an array of {column_name => value}
        # hashes, converting DBI::Timestamp cells to DateTime on the way out.
        def execute_select(sql)
          result = []
          execute(sql) do |handle|
            handle.each do |row|
              row_hash = {}
              row.each_with_index do |value, i|
                if value.is_a? DBI::Timestamp
                  value = DateTime.new(value.year, value.month, value.day, value.hour, value.minute, value.sec)
                end
                row_hash[handle.column_names[i]] = value
              end
              result << row_hash
            end
          end
          result
        end
|
743
|
+
|
744
|
+
def query_has_limit_and_offset?(sql)
|
745
|
+
match = sql.match(/#limit_offset_temp -- limit => (\d+) offset => (\d+)/)
|
746
|
+
end
|
747
|
+
|
748
|
+
# Turns IDENTITY_INSERT ON for table during execution of the block
|
749
|
+
# N.B. This sets the state of IDENTITY_INSERT to OFF after the
|
750
|
+
# block has been executed without regard to its previous state
|
751
|
+
|
752
|
+
        # Turns IDENTITY_INSERT ON for +table_name+ during execution of the
        # block. N.B. this sets the state of IDENTITY_INSERT to OFF after the
        # block has been executed without regard to its previous state.
        def with_identity_insert_enabled(table_name, &block)
          set_identity_insert(table_name, true)
          yield
        ensure
          set_identity_insert(table_name, false)
        end

        # Toggles IDENTITY_INSERT, wrapping any failure in ActiveRecordError.
        def set_identity_insert(table_name, enable = true)
          execute "SET IDENTITY_INSERT #{table_name} #{enable ? 'ON' : 'OFF'}"
        rescue Exception => e
          # NOTE(review): rescuing Exception is overly broad; kept as-is to
          # preserve existing behavior.
          raise ActiveRecordError, "IDENTITY_INSERT could not be turned #{enable ? 'ON' : 'OFF'} for table #{table_name}"
        end
|
764
|
+
|
765
|
+
def get_table_name(sql)
|
766
|
+
if sql =~ /^\s*insert\s+into\s+([^\(\s]+)\s*|^\s*update\s+([^\(\s]+)\s*/i
|
767
|
+
$1
|
768
|
+
elsif sql =~ /from\s+([^\(\s]+)\s*/i
|
769
|
+
$1
|
770
|
+
else
|
771
|
+
nil
|
772
|
+
end
|
773
|
+
end
|
774
|
+
|
775
|
+
        # Name of the IDENTITY column of +table_name+, or nil if it has
        # none. Column metadata is memoized per table in @table_columns.
        def identity_column(table_name)
          @table_columns = {} unless @table_columns
          @table_columns[table_name] = columns(table_name) if @table_columns[table_name] == nil
          @table_columns[table_name].each do |col|
            return col.name if col.identity
          end

          return nil
        end

        # Returns the table name when +sql+ explicitly assigns that table's
        # identity column (requiring SET IDENTITY_INSERT), else nil.
        def query_requires_identity_insert?(sql)
          table_name = get_table_name(sql)
          id_column = identity_column(table_name)
          sql =~ /\[#{id_column}\]/ ? table_name : nil
        end
|
790
|
+
|
791
|
+
# Flips the sort direction of every fragment in an ORDER BY clause:
# ASC becomes DESC and vice versa; a fragment with no explicit direction
# gets DESC appended. Fragment spacing is preserved.
def change_order_direction(order)
  order.split(",").map do |clause|
    if clause =~ /\bDESC\b/i
      clause.gsub(/\bDESC\b/i, "ASC")
    elsif clause =~ /\bASC\b/i
      clause.gsub(/\bASC\b/i, "DESC")
    else
      # Fragments come from split(",") and so never contain a comma
      # themselves; appending is equivalent to the original split/join.
      clause + ' DESC'
    end
  end.join(",")
end
|
800
|
+
|
801
|
+
# Returns the names of the columns of +table_name+ whose metadata flags
# them as "special" (column#is_special). Column metadata is memoized per
# table in @table_columns.
def get_special_columns(table_name)
  @table_columns ||= {}
  @table_columns[table_name] ||= columns(table_name)
  @table_columns[table_name].select { |column| column.is_special }.map { |column| column.name }
end
|
810
|
+
|
811
|
+
# Rewrites +sql+ so that "special" columns (as reported by
# get_special_columns) work on SQL Server: equality comparisons on them
# become LIKE, and ORDER BY clauses naming them are stripped.
# Mutates +sql+ in place and returns it.
def repair_special_columns(sql)
  special_cols = get_special_columns(get_table_name(sql))
  special_cols.to_a.each do |col|
    # Regexp.escape guards against column names containing regexp
    # metacharacters; plain names behave exactly as before.
    sql.gsub!(/ #{Regexp.escape(col.to_s)} = /, " #{col.to_s} LIKE ")
    sql.gsub!(/ORDER BY #{Regexp.escape(col.to_s)}/i, '')
  end
  sql
end
|
819
|
+
|
820
|
+
end #class SQLServerAdapter < AbstractAdapter
|
821
|
+
|
822
|
+
# If value is a string and destination column is binary, don't quote the string for MS SQL
module Quoting
  # Quotes the column value to help prevent
  # {SQL injection attacks}[http://en.wikipedia.org/wiki/SQL_injection].
  #
  # +value+ may be any Ruby object; +column+ is an optional column object
  # whose #type steers the conversion. Returns a SQL literal string,
  # except for binary columns, whose converted value is returned raw.
  # NOTE(review): branch order matters — String must be tested before the
  # numeric/date branches below.
  def quote(value, column = nil)
    # records are quoted as their primary key
    return value.quoted_id if value.respond_to?(:quoted_id)
    # puts "Type: #{column.type} Name: #{column.name}" if column
    case value
    when String, ActiveSupport::Multibyte::Chars
      value = value.to_s
      if column && column.type == :binary && column.class.respond_to?(:string_to_binary)
        # Binary data is handed to the column class's converter, unquoted.
        column.class.string_to_binary(value)
      elsif column && [:integer, :float].include?(column.type)
        # Coerce numeric strings so they are emitted as unquoted numbers.
        value = column.type == :integer ? value.to_i : value.to_f
        value.to_s
      else
        "'#{quote_string(value)}'" # ' (for ruby-mode)
      end
    when NilClass then "NULL"
    # Booleans map to 1/0 for integer columns, otherwise to the adapter's
    # quoted_true/quoted_false literals.
    when TrueClass then (column && column.type == :integer ? '1' : quoted_true)
    when FalseClass then (column && column.type == :integer ? '0' : quoted_false)
    when Float, Fixnum, Bignum then value.to_s
    # BigDecimals need to be output in a non-normalized form and quoted.
    when BigDecimal then value.to_s('F')
    when Date then "'#{value.to_s}'"
    when Time, DateTime then "'#{quoted_date(value)}'"
    # Anything else is serialized to YAML and quoted as a string.
    else "'#{quote_string(value.to_yaml)}'"
    end
  end
end
|
853
|
+
|
854
|
+
end #module ConnectionAdapters
|
855
|
+
end #module ActiveRecord
|
metadata
ADDED
@@ -0,0 +1,61 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: akitaonrails-activerecord-sqlserver-adapter
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 1.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Tom Ward
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
|
12
|
+
date: 2008-08-13 00:00:00 -07:00
|
13
|
+
default_executable:
|
14
|
+
dependencies:
|
15
|
+
- !ruby/object:Gem::Dependency
|
16
|
+
name: activerecord
|
17
|
+
version_requirement:
|
18
|
+
version_requirements: !ruby/object:Gem::Requirement
|
19
|
+
requirements:
|
20
|
+
- - ">="
|
21
|
+
- !ruby/object:Gem::Version
|
22
|
+
version: 1.15.5.7843
|
23
|
+
version:
|
24
|
+
description:
|
25
|
+
email: tom@popdog.net
|
26
|
+
executables: []
|
27
|
+
|
28
|
+
extensions: []
|
29
|
+
|
30
|
+
extra_rdoc_files: []
|
31
|
+
|
32
|
+
files:
|
33
|
+
- lib/active_record/connection_adapters/sqlserver_adapter.rb
|
34
|
+
has_rdoc: false
|
35
|
+
homepage: http://wiki.rubyonrails.org/rails/pages/SQL+Server
|
36
|
+
post_install_message:
|
37
|
+
rdoc_options: []
|
38
|
+
|
39
|
+
require_paths:
|
40
|
+
- lib
|
41
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
42
|
+
requirements:
|
43
|
+
- - ">="
|
44
|
+
- !ruby/object:Gem::Version
|
45
|
+
version: "0"
|
46
|
+
version:
|
47
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
48
|
+
requirements:
|
49
|
+
- - ">="
|
50
|
+
- !ruby/object:Gem::Version
|
51
|
+
version: "0"
|
52
|
+
version:
|
53
|
+
requirements: []
|
54
|
+
|
55
|
+
rubyforge_project: activerecord
|
56
|
+
rubygems_version: 1.2.0
|
57
|
+
signing_key:
|
58
|
+
specification_version: 2
|
59
|
+
summary: SQL Server adapter for Active Record
|
60
|
+
test_files: []
|
61
|
+
|