odbc_adapter 3.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +9 -0
- data/.travis.yml +29 -0
- data/Gemfile +6 -0
- data/LICENSE +21 -0
- data/README.md +37 -0
- data/Rakefile +10 -0
- data/bin/ci-setup +13 -0
- data/bin/console +7 -0
- data/bin/setup +8 -0
- data/lib/active_record/connection_adapters/odbc_adapter.rb +127 -0
- data/lib/odbc_adapter.rb +17 -0
- data/lib/odbc_adapter/adapters/mysql_odbc_adapter.rb +141 -0
- data/lib/odbc_adapter/adapters/postgresql_odbc_adapter.rb +230 -0
- data/lib/odbc_adapter/column.rb +67 -0
- data/lib/odbc_adapter/column_metadata.rb +77 -0
- data/lib/odbc_adapter/database_limits.rb +10 -0
- data/lib/odbc_adapter/database_statements.rb +254 -0
- data/lib/odbc_adapter/dbms.rb +50 -0
- data/lib/odbc_adapter/quoting.rb +81 -0
- data/lib/odbc_adapter/schema_statements.rb +16 -0
- data/lib/odbc_adapter/type_caster.rb +42 -0
- data/lib/odbc_adapter/version.rb +3 -0
- data/odbc_adapter.gemspec +28 -0
- metadata +123 -0
@@ -0,0 +1,230 @@
|
|
1
|
+
module ODBCAdapter
  module Adapters
    # Overrides specific to PostgreSQL. Mostly taken from
    # ActiveRecord::ConnectionAdapters::PostgreSQLAdapter
    class PostgreSQLODBCAdapter < ActiveRecord::ConnectionAdapters::ODBCAdapter
      class BindSubstitution < Arel::Visitors::PostgreSQL
        include Arel::Visitors::BindVisitor
      end

      class PostgreSQLColumn < Column
        def initialize(name, default, sql_type, native_type, null = true, scale = nil, native_types = nil, limit = nil)
          super
          @default = extract_default
        end

        private

        # Normalizes the raw default expression reported by PostgreSQL
        # (e.g. "'abc'::character varying") into a plain Ruby value.
        # Returns nil when the default is a function call, a user-defined
        # type, or anything else whose value can't be derived statically.
        def extract_default
          case @default
          when NilClass
            nil
          # Numeric types
          when /\A\(?(-?\d+(\.\d*)?\)?(::bigint)?)\z/ then $1
          # Character types
          when /\A\(?'(.*)'::.*\b(?:character varying|bpchar|text)\z/m then $1
          # Binary data types
          when /\A'(.*)'::bytea\z/m then $1
          # Date/time types
          when /\A'(.+)'::(?:time(?:stamp)? with(?:out)? time zone|date)\z/ then $1
          when /\A'(.*)'::interval\z/ then $1
          # Boolean type
          when 'true' then true
          when 'false' then false
          # Geometric types
          when /\A'(.*)'::(?:point|line|lseg|box|"?path"?|polygon|circle)\z/ then $1
          # Network address types
          when /\A'(.*)'::(?:cidr|inet|macaddr)\z/ then $1
          # Bit string types
          when /\AB'(.*)'::"?bit(?: varying)?"?\z/ then $1
          # XML type
          when /\A'(.*)'::xml\z/m then $1
          # Arrays
          when /\A'(.*)'::"?\D+"?\[\]\z/ then $1
          # Object identifier types
          # NOTE(review): this branch is shadowed by the "Numeric types"
          # pattern above (its optional parens also match bare integers),
          # and the regex has no capture group so $1 would be nil anyway.
          # Left as-is to avoid changing observable behavior.
          when /\A-?\d+\z/ then $1
          else
            # Anything else is blank, some user type, or some function
            # and we can't know the value of that, so return nil.
            nil
          end
        end
      end

      PRIMARY_KEY = 'SERIAL PRIMARY KEY'.freeze

      # Override the default column class
      def column_class
        PostgreSQLColumn
      end

      # Filter for ODBCAdapter#tables.
      # Returns true when the table should be omitted from #tables
      # (system schemas and non-TABLE object types).
      # Named `table_filtered?` to match the hook probed by
      # DatabaseStatements#tables via respond_to?; the previous
      # `table_filter` name is kept as an alias for compatibility.
      def table_filtered?(schema_name, table_type)
        %w[information_schema pg_catalog].include?(schema_name) || table_type !~ /TABLE/i
      end
      alias table_filter table_filtered?

      # Returns the sequence name for a table's primary key or some other specified key.
      def default_sequence_name(table_name, pk = nil) #:nodoc:
        serial_sequence(table_name, pk || 'id').split('.').last
      rescue ActiveRecord::StatementInvalid
        # Fall back to PostgreSQL's conventional sequence naming scheme.
        "#{table_name}_#{pk || 'id'}_seq"
      end

      # Returns the current ID of a table's sequence.
      def last_insert_id(sequence_name)
        r = exec_query("SELECT currval('#{sequence_name}')", 'SQL')
        Integer(r.rows.first.first)
      end

      # Executes an INSERT query and returns the new record's ID, using
      # PostgreSQL's RETURNING clause when a primary key can be determined.
      def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
        unless pk
          table_ref = extract_table_ref_from_insert_sql(sql)
          pk = primary_key(table_ref) if table_ref
        end

        if pk
          select_value("#{sql} RETURNING #{quote_column_name(pk)}")
        else
          super
        end
      end
      alias :create :insert

      # Appends a RETURNING clause for the primary key so the new row's ID
      # comes back with the INSERT statement itself.
      def sql_for_insert(sql, pk, id_value, sequence_name, binds)
        unless pk
          table_ref = extract_table_ref_from_insert_sql(sql)
          pk = primary_key(table_ref) if table_ref
        end

        sql = "#{sql} RETURNING #{quote_column_name(pk)}" if pk
        [sql, binds]
      end

      # Wraps bytea values in the { value:, format: 1 } hash the ODBC driver
      # expects for binary parameters; everything else defers to super.
      def type_cast(value, column)
        return super unless column

        case value
        when String
          return super unless 'bytea' == column.sql_type
          { value: value, format: 1 }
        else
          super
        end
      end

      # Quotes a string, escaping any ' (single quote) and \ (backslash)
      # characters.
      def quote_string(string)
        string.gsub(/\\/, '\&\&').gsub(/'/, "''")
      end

      def quoted_true
        "'t'"
      end

      def quoted_false
        "'f'"
      end

      # Disables all triggers (and therefore FK checks) on every table for
      # the duration of the block, re-enabling them afterwards even if the
      # block raises.
      def disable_referential_integrity #:nodoc:
        execute(tables.map { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER ALL" }.join(';'))
        yield
      ensure
        execute(tables.map { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER ALL" }.join(';'))
      end

      # Create a new PostgreSQL database. Options include <tt>:owner</tt>, <tt>:template</tt>,
      # <tt>:encoding</tt>, <tt>:tablespace</tt>, and <tt>:connection_limit</tt> (note that MySQL
      # uses <tt>:charset</tt> while PostgreSQL uses <tt>:encoding</tt>).
      #
      # Example:
      #   create_database config[:database], config
      #   create_database 'foo_development', :encoding => 'unicode'
      def create_database(name, options = {})
        options = options.reverse_merge(encoding: 'utf8')

        # Build the options clause with map/join rather than Enumerable#sum:
        # on Ruby >= 2.4 the native #sum raises TypeError for strings unless
        # given a string identity element.
        option_string = options.symbolize_keys.map do |key, value|
          case key
          when :owner
            " OWNER = \"#{value}\""
          when :template
            " TEMPLATE = \"#{value}\""
          when :encoding
            " ENCODING = '#{value}'"
          when :tablespace
            " TABLESPACE = \"#{value}\""
          when :connection_limit
            " CONNECTION LIMIT = #{value}"
          else
            ""
          end
        end.join

        execute("CREATE DATABASE #{quote_table_name(name)}#{option_string}")
      end

      # Drops a PostgreSQL database.
      #
      # Example:
      #   drop_database 'matt_development'
      def drop_database(name) #:nodoc:
        execute "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
      end

      # Renames a table.
      def rename_table(name, new_name)
        execute("ALTER TABLE #{quote_table_name(name)} RENAME TO #{quote_table_name(new_name)}")
      end

      # Changes a column's type (and, if supplied, its default value).
      def change_column(table_name, column_name, type, options = {})
        execute("ALTER TABLE #{table_name} ALTER #{column_name} TYPE #{type_to_sql(type, options[:limit], options[:precision], options[:scale])}")
        change_column_default(table_name, column_name, options[:default]) if options_include_default?(options)
      end

      # Sets a new default value for the given column.
      def change_column_default(table_name, column_name, default)
        execute("ALTER TABLE #{table_name} ALTER COLUMN #{column_name} SET DEFAULT #{quote(default)}")
      end

      # Renames a column.
      def rename_column(table_name, column_name, new_column_name)
        execute("ALTER TABLE #{table_name} RENAME #{column_name} TO #{new_column_name}")
      end

      # Drops an index; PostgreSQL identifies indexes globally, so the table
      # name is unused.
      def remove_index!(_table_name, index_name)
        execute("DROP INDEX #{quote_table_name(index_name)}")
      end

      def rename_index(table_name, old_name, new_name)
        execute("ALTER INDEX #{quote_column_name(old_name)} RENAME TO #{quote_table_name(new_name)}")
      end

      # Returns a SELECT DISTINCT clause for a given set of columns and a given ORDER BY clause.
      #
      # PostgreSQL requires the ORDER BY columns in the select list for distinct queries, and
      # requires that the ORDER BY include the distinct column.
      #
      #   distinct("posts.id", "posts.created_at desc")
      def distinct(columns, orders)
        return "DISTINCT #{columns}" if orders.empty?

        # Construct a clean list of column names from the ORDER BY clause, removing
        # any ASC/DESC modifiers
        order_columns = orders.map { |s| s.gsub(/\s+(ASC|DESC)\s*(NULLS\s+(FIRST|LAST)\s*)?/i, '') }
        order_columns.reject! { |c| c.blank? }
        order_columns = order_columns.zip((0...order_columns.size).to_a).map { |s,i| "#{s} AS alias_#{i}" }

        "DISTINCT #{columns}, #{order_columns * ', '}"
      end

      private

      # Asks PostgreSQL for the sequence backing a serial column.
      def serial_sequence(table, column)
        result = exec_query(<<-eosql, 'SCHEMA')
          SELECT pg_get_serial_sequence('#{table}', '#{column}')
        eosql
        result.rows.first.first
      end
    end
  end
end
|
@@ -0,0 +1,67 @@
|
|
1
|
+
module ODBCAdapter
  # Column subclass that is built from ODBC SQLColumns catalog data rather
  # than from a native SQL type declaration string.
  class Column < ActiveRecord::ConnectionAdapters::Column
    def initialize(name, default, sql_type, native_type, null = true, scale = nil, native_types = nil, limit = nil)
      @name = name
      @default = default
      # The DBMS's TYPE_NAME string doubles as both sql_type and native_type.
      @sql_type = native_type.to_s
      @native_type = native_type.to_s
      @null = null
      @precision = extract_precision(sql_type, limit)
      @scale = extract_scale(sql_type, scale)
      @type = genericize(sql_type, @scale, native_types)
      @primary = nil
    end

    private

    # Maps an ODBC SQL type to an ActiveRecord abstract data type
    #
    # c.f. Mappings in ConnectionAdapters::Column#simplified_type based on
    # native column type declaration
    #
    # See also:
    # Column#klass (schema_definitions.rb) for the Ruby class corresponding
    # to each abstract data type.
    def genericize(sql_type, scale, native_types)
      case sql_type
      when ODBC::SQL_BIT then :boolean
      when ODBC::SQL_CHAR, ODBC::SQL_VARCHAR then :string
      when ODBC::SQL_LONGVARCHAR then :text
      when ODBC::SQL_WCHAR, ODBC::SQL_WVARCHAR then :string
      when ODBC::SQL_WLONGVARCHAR then :text
      when ODBC::SQL_TINYINT, ODBC::SQL_SMALLINT, ODBC::SQL_INTEGER, ODBC::SQL_BIGINT then :integer
      when ODBC::SQL_REAL, ODBC::SQL_FLOAT, ODBC::SQL_DOUBLE then :float
      # If SQLGetTypeInfo output of ODBC driver doesn't include a mapping
      # to a native type from SQL_DECIMAL/SQL_NUMERIC, map to :float
      when ODBC::SQL_DECIMAL, ODBC::SQL_NUMERIC then numeric_type(scale, native_types)
      when ODBC::SQL_BINARY, ODBC::SQL_VARBINARY, ODBC::SQL_LONGVARBINARY then :binary
      # SQL_DATETIME is an alias for SQL_DATE in ODBC's sql.h & sqlext.h
      when ODBC::SQL_DATE, ODBC::SQL_TYPE_DATE, ODBC::SQL_DATETIME then :date
      when ODBC::SQL_TIME, ODBC::SQL_TYPE_TIME then :time
      when ODBC::SQL_TIMESTAMP, ODBC::SQL_TYPE_TIMESTAMP then :timestamp
      when ODBC::SQL_GUID then :string
      else
        # when SQL_UNKNOWN_TYPE
        # (ruby-odbc driver doesn't support following ODBC SQL types:
        # SQL_WCHAR, SQL_WVARCHAR, SQL_WLONGVARCHAR, SQL_INTERVAL_xxx)
        # Fixed: previously interpolated an undefined local `odbcSqlType`,
        # which raised NameError instead of the intended ArgumentError.
        raise ArgumentError, "Unsupported ODBC SQL type [#{sql_type}]"
      end
    end

    # Ignore the ODBC precision of SQL types which don't take
    # an explicit precision when defining a column
    def extract_precision(sql_type, precision)
      precision if [ODBC::SQL_DECIMAL, ODBC::SQL_NUMERIC].include?(sql_type)
    end

    # Ignore the ODBC scale of SQL types which don't take
    # an explicit scale when defining a column
    def extract_scale(sql_type, scale)
      scale || 0 if [ODBC::SQL_DECIMAL, ODBC::SQL_NUMERIC].include?(sql_type)
    end

    # DECIMAL/NUMERIC with zero scale behaves like an integer; otherwise
    # prefer :decimal when the driver reports a native decimal type, and
    # fall back to :float when it does not.
    def numeric_type(scale, native_types)
      scale.nil? || scale == 0 ? :integer : (native_types[:decimal].nil? ? :float : :decimal)
    end
  end
end
|
@@ -0,0 +1,77 @@
|
|
1
|
+
module ODBCAdapter
  # Derives the adapter's native_database_types mapping from the type
  # information the ODBC driver reports via SQLGetTypeInfo.
  class ColumnMetadata
    # Candidate ODBC SQL types for each ActiveRecord abstract type, in
    # order of preference. Frozen: module-level constants should not be
    # mutable shared state.
    GENERICS = {
      primary_key: [ODBC::SQL_INTEGER, ODBC::SQL_SMALLINT],
      string:      [ODBC::SQL_VARCHAR],
      text:        [ODBC::SQL_LONGVARCHAR, ODBC::SQL_VARCHAR],
      integer:     [ODBC::SQL_INTEGER, ODBC::SQL_SMALLINT],
      decimal:     [ODBC::SQL_NUMERIC, ODBC::SQL_DECIMAL],
      float:       [ODBC::SQL_DOUBLE, ODBC::SQL_REAL],
      datetime:    [ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP],
      timestamp:   [ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP],
      time:        [ODBC::SQL_TYPE_TIME, ODBC::SQL_TIME, ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP],
      date:        [ODBC::SQL_TYPE_DATE, ODBC::SQL_DATE, ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP],
      binary:      [ODBC::SQL_LONGVARBINARY, ODBC::SQL_VARBINARY],
      boolean:     [ODBC::SQL_BIT, ODBC::SQL_TINYINT, ODBC::SQL_SMALLINT, ODBC::SQL_INTEGER]
    }.freeze

    attr_reader :adapter

    def initialize(adapter)
      @adapter = adapter
    end

    # Builds the abstract-type => native-type hash by picking, for each
    # abstract type, the first candidate SQL type the driver reports.
    # TODO: implement boolean column surrogates
    def native_database_types
      grouped = reported_types.group_by { |row| row[1] }

      GENERICS.each_with_object({}) do |(abstract, candidates), mapped|
        # #detect stops at the first candidate present in the driver's
        # reported types; the assignment inside is the real work.
        candidates.detect do |candidate|
          next unless grouped[candidate]
          mapped[abstract] = native_type_mapping(abstract, grouped[candidate])
        end
      end
    end

    private

    # Creates a Hash describing a mapping from an abstract type to a
    # DBMS native type for use by #native_database_types
    def native_type_mapping(abstract, rows)
      # The appropriate SQL for :primary_key is hard to derive as
      # ODBC doesn't provide any info on a DBMS's native syntax for
      # autoincrement columns. So we use a lookup instead.
      return adapter.class::PRIMARY_KEY if abstract == :primary_key
      selected_row = rows[0]

      # If more than one native type corresponds to the SQL type we're
      # handling, the type in the first descriptor should be the
      # best match, because the ODBC specification states that
      # SQLGetTypeInfo returns the results ordered by SQL type and then by
      # how closely the native type maps to that SQL type.
      # But, for :text and :binary, select the native type with the
      # largest capacity. (Compare SQLGetTypeInfo:COLUMN_SIZE values)
      selected_row = rows.max_by { |row| row[2] } if [:text, :binary].include?(abstract)
      result = { name: selected_row[0] } # SQLGetTypeInfo: TYPE_NAME

      create_params = selected_row[5]
      # Depending on the column type, the CREATE_PARAMS keywords can
      # include length, precision or scale.
      if create_params && create_params.strip.length > 0 && abstract != :decimal
        result[:limit] = selected_row[2] # SQLGetTypeInfo: COL_SIZE
      end

      result
    end

    # Fetches (and memoizes) the rows from the driver's SQLGetTypeInfo
    # call, always releasing the ODBC statement handle.
    def reported_types
      @reported_types ||=
        begin
          stmt = adapter.raw_connection.types
          stmt.fetch_all
        ensure
          stmt.drop unless stmt.nil?
        end
    end
  end
end
|
@@ -0,0 +1,10 @@
|
|
1
|
+
module ODBCAdapter
  module DatabaseLimits
    # Returns the maximum length of a table name.
    # Takes the larger of the two limits the driver reports
    # (SQL_MAX_IDENTIFIER_LEN and SQL_MAX_TABLE_NAME_LEN).
    def table_alias_length
      [
        dbms.field_for(ODBC::SQL_MAX_IDENTIFIER_LEN),
        dbms.field_for(ODBC::SQL_MAX_TABLE_NAME_LEN)
      ].max
    end
  end
end
|
@@ -0,0 +1,254 @@
|
|
1
|
+
module ODBCAdapter
  module DatabaseStatements
    # ODBC constants missing from Christian Werner's Ruby ODBC driver
    SQL_NO_NULLS = 0
    SQL_NULLABLE = 1
    SQL_NULLABLE_UNKNOWN = 2

    # Returns an array of arrays containing the field values.
    # Order is the same as that returned by #columns.
    def select_rows(sql, name = nil)
      log(sql, name) do
        begin
          stmt = @connection.run(sql)
          stmt.fetch_all
        ensure
          # Release the ODBC statement handle even if fetching raises.
          stmt.drop if stmt
        end
      end
    end

    # Executes the SQL statement in the context of this connection.
    # Returns the number of rows affected.
    def execute(sql, name = nil)
      log(sql, name) do
        @connection.do(sql)
      end
    end

    # Executes +sql+ statement in the context of this connection using
    # +binds+ as the bind substitutes. +name+ is logged along with
    # the executed +sql+ statement. Returns an ActiveRecord::Result.
    def exec_query(sql, name = 'SQL', binds = [])
      log(sql, name) do
        begin
          stmt = @connection.run(sql)
          columns = stmt.columns
          values = stmt.to_a
        ensure
          # Release the ODBC statement handle even if fetching raises.
          stmt.drop if stmt
        end

        # Apply per-column casts derived from the ODBC column metadata.
        casters = TypeCaster.build_from(columns.values)
        if casters.any?
          values.each do |row|
            casters.each { |caster| row[caster.idx] = caster.cast(row[caster.idx]) }
          end
        end

        # Hook for DBMS-specific adapters to post-process values.
        values = dbms_type_cast(columns.values, values)
        column_names = columns.keys.map { |key| format_case(key) }
        ActiveRecord::Result.new(column_names, values)
      end
    end

    # Begins the transaction (and turns off auto-committing).
    def begin_db_transaction
      @connection.autocommit = false
    end

    # Commits the transaction (and turns on auto-committing).
    def commit_db_transaction
      @connection.commit
      @connection.autocommit = true
    end

    # Rolls back the transaction (and turns on auto-committing). Must be
    # done if the transaction block raises an exception or returns false.
    def rollback_db_transaction
      @connection.rollback
      @connection.autocommit = true
    end

    # Returns the default sequence name for a table.
    # Used for databases which don't support an autoincrementing column
    # type, but do support sequences.
    def default_sequence_name(table, _column)
      "#{table}_seq"
    end

    def recreate_database(name, options = {})
      drop_database(name)
      create_database(name, options)
    end

    def current_database
      dbms.field_for(ODBC::SQL_DATABASE_NAME).strip
    end

    # Returns an array of table names, for database tables visible on the
    # current connection.
    def tables(_name = nil)
      stmt = @connection.tables
      result = stmt.fetch_all || []

      result.each_with_object([]) do |row, table_names|
        schema_name, table_name, table_type = row[1..3]
        next if filter_table?(schema_name, table_type)
        table_names << format_case(table_name)
      end
    ensure
      stmt.drop if stmt
    end

    # The class of the column to instantiate
    def column_class
      ::ODBCAdapter::Column
    end

    # Returns an array of Column objects for the table specified by +table_name+.
    def columns(table_name, name = nil)
      stmt = @connection.columns(native_case(table_name.to_s))
      result = stmt.fetch_all || []

      result.each_with_object([]) do |col, cols|
        col_name = col[3] # SQLColumns: COLUMN_NAME
        col_default = col[12] # SQLColumns: COLUMN_DEF
        col_sql_type = col[4] # SQLColumns: DATA_TYPE
        col_native_type = col[5] # SQLColumns: TYPE_NAME
        col_limit = col[6] # SQLColumns: COLUMN_SIZE
        col_scale = col[8] # SQLColumns: DECIMAL_DIGITS

        # SQLColumns: IS_NULLABLE, SQLColumns: NULLABLE
        col_nullable = nullability(col_name, col[17], col[10])

        cols << column_class.new(format_case(col_name), col_default, col_sql_type, col_native_type, col_nullable, col_scale, native_database_types, col_limit)
      end
    ensure
      stmt.drop if stmt
    end

    # Returns an array of indexes for the given table.
    def indexes(table_name, name = nil)
      stmt = @connection.indexes(native_case(table_name.to_s))
      result = stmt.fetch_all || []

      index_cols = []
      index_name = nil
      unique = nil

      result.each_with_object([]).with_index do |(row, indices), row_idx|
        # Skip table statistics
        next if row[6] == 0 # SQLStatistics: TYPE

        if row[7] == 1 # SQLStatistics: ORDINAL_POSITION
          # Start of column descriptor block for next index
          index_cols = []
          unique = row[3].zero? # SQLStatistics: NON_UNIQUE
          index_name = String.new(row[5]) # SQLStatistics: INDEX_NAME
        end

        index_cols << format_case(row[8]) # SQLStatistics: COLUMN_NAME
        next_row = result[row_idx + 1]

        # Emit the definition on the last row, or when the next row begins
        # a new statistics/index block.
        if (row_idx == result.length - 1) || (next_row[6] == 0 || next_row[7] == 1)
          indices << IndexDefinition.new(table_name, format_case(index_name), unique, index_cols)
        end
      end
    ensure
      stmt.drop if stmt
    end

    # Returns just a table's primary key
    def primary_key(table_name)
      stmt = @connection.primary_keys(native_case(table_name.to_s))
      result = stmt.fetch_all || []
      result[0] && result[0][3] # SQLPrimaryKeys: COLUMN_NAME
    ensure
      stmt.drop if stmt
    end

    # SQLSTATE-style native error code for unique-constraint violations.
    ERR_DUPLICATE_KEY_VALUE = 23505

    def translate_exception(exception, message)
      case exception.message[/^\d+/].to_i
      when ERR_DUPLICATE_KEY_VALUE
        ActiveRecord::RecordNotUnique.new(message, exception)
      else
        super
      end
    end

    protected

    # Returns an array of record hashes with the column names as keys and
    # column values as values.
    def select(sql, name = nil, binds = [])
      exec_query(sql, name, binds).to_a
    end

    # Returns the last auto-generated ID from the affected table.
    def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
      begin
        stmt = log(sql, name) { @connection.run(sql) }
        table = extract_table_ref_from_insert_sql(sql)

        seq = sequence_name || default_sequence_name(table, pk)
        res = id_value || last_insert_id(table, seq, stmt)
      ensure
        stmt.drop unless stmt.nil?
      end
      res
    end

    private

    # Hook for DBMS-specific value coercion; the base implementation is a
    # pass-through.
    def dbms_type_cast(columns, values)
      values
    end

    # Returns true when the adapter wants the table omitted from #tables.
    # Probes the `table_filtered?` hook, falling back to the legacy
    # `table_filter` name some adapters define.
    def filter_table?(schema_name, table_type)
      if respond_to?(:table_filtered?)
        table_filtered?(schema_name, table_type)
      elsif respond_to?(:table_filter)
        table_filter(schema_name, table_type)
      else
        false
      end
    end

    # Pulls the target table name out of an INSERT statement, or nil when
    # it can't be recognized.
    def extract_table_ref_from_insert_sql(sql)
      sql[/into\s+([^\(]*).*values\s*\(/i]
      $1.strip if $1
    end

    # Assume received identifier is in DBMS's data dictionary case.
    def format_case(identifier)
      case dbms.field_for(ODBC::SQL_IDENTIFIER_CASE)
      when ODBC::SQL_IC_UPPER
        identifier =~ /[a-z]/ ? identifier : identifier.downcase
      else
        identifier
      end
    end

    # In general, ActiveRecord uses lowercase attribute names. This may
    # conflict with the database's data dictionary case.
    #
    # The ODBCAdapter uses the following conventions for databases
    # which report SQL_IDENTIFIER_CASE = SQL_IC_UPPER:
    # * if a name is returned from the DBMS in all uppercase, convert it
    #   to lowercase before returning it to ActiveRecord.
    # * if a name is returned from the DBMS in lowercase or mixed case,
    #   assume the underlying schema object's name was quoted when
    #   the schema object was created. Leave the name untouched before
    #   returning it to ActiveRecord.
    # * before making an ODBC catalog call, if a supplied identifier is all
    #   lowercase, convert it to uppercase. Leave mixed case or all
    #   uppercase identifiers unchanged.
    # * columns created with quoted lowercase names are not supported.
    #
    # Converts an identifier to the case conventions used by the DBMS.
    # Assume received identifier is in ActiveRecord case.
    def native_case(identifier)
      case dbms.field_for(ODBC::SQL_IDENTIFIER_CASE)
      when ODBC::SQL_IC_UPPER
        identifier =~ /[A-Z]/ ? identifier : identifier.upcase
      else
        identifier
      end
    end

    # Assume column is nullable if nullable == SQL_NULLABLE_UNKNOWN
    def nullability(col_name, is_nullable, nullable)
      not_nullable = (!is_nullable || nullable.to_s.match('NO') != nil)
      result = !(not_nullable || nullable == SQL_NO_NULLS)

      # HACK!
      # MySQL native ODBC driver doesn't report nullability accurately.
      # So force nullability of 'id' columns
      col_name == 'id' ? false : result
    end
  end
end
|