snowflake_odbc_adapter 7.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,117 @@
+ module ActiveRecord
+   module ConnectionAdapters # :nodoc:
+     module SnowflakeOdbc
+       module DatabaseStatements # :nodoc:
+         # Has to be true because of CREATE TABLE handling
+         def prepared_statements
+           true
+         end
+
+         # Executes the SQL statement in the context of this connection.
+         # Returns the number of rows affected.
+         def execute(sql, name = nil, binds = [])
+           log(sql, name, binds) do |notification_payload|
+             rc = @raw_connection.do(sql, *binds.map { |bind| prepare_bind(bind).to_s })
+             notification_payload[:row_count] = rc
+             rc
+           end
+         end
+
+         # Executes the delete +sql+ statement in the context of this connection using
+         # +binds+ as the bind substitutes. +name+ is logged along with
+         # the executed +sql+ statement.
+         def exec_delete(sql, name, binds)
+           execute(sql, name, binds)
+         end
+
+         # Begins the transaction (and turns off auto-committing).
+         def begin_db_transaction
+           @raw_connection.autocommit = false
+         end
+
+         # Commits the transaction (and turns on auto-committing).
+         def commit_db_transaction
+           @raw_connection.commit
+           @raw_connection.autocommit = true
+         end
+
+         # Rolls back the transaction (and turns on auto-committing). Must be
+         # done if the transaction block raises an exception or returns false.
+         def exec_rollback_db_transaction
+           @raw_connection.rollback
+           @raw_connection.autocommit = true
+         end
+
+         def internal_exec_query(sql, name = "SQL", binds = [], prepare: false, async: false, allow_retry: false) # :nodoc:
+           log(sql, name, binds) do |notification_payload|
+             if prepare || binds.any?
+               # TODO: refactor
+               stmt = @raw_connection.prepare(sql)
+               binds.each_with_index do |bind, i|
+                 if (bind.respond_to?("value_for_database") && bind.value.is_a?(Integer)) || bind.is_a?(Integer)
+                   stmt.param_type(i, ODBC::SQL_INTEGER)
+                 end
+               end
+               stmt.execute(*binds.map { |bind| prepare_bind(bind).to_s })
+             else
+               stmt = @raw_connection.run(sql)
+             end
+             columns = stmt.columns
+             values = stmt.to_a
+             stmt.drop
+             notification_payload[:row_count] = values.count
+             values = values&.map { |row| row&.map { |value| _type_cast_value(value) } }
+             column_names = columns.keys.map { |key| format_case(key) }
+             ActiveRecord::Result.new(column_names, values)
+           end
+         end
+
+         def bind_params(binds, sql)
+           prepared_binds = *prepared_binds(binds)
+           prepared_binds.each.with_index(1) do |val, ind|
+             sql = sql.gsub("$#{ind}", "'#{val}'")
+           end
+           sql
+         end
+
+         def prepared_binds(binds)
+           binds.map(&:value_for_database)
+         end
+
+         private
+
+         def prepare_bind(bind)
+           if bind.respond_to?("value_for_database")
+             type_cast(bind.value_for_database)
+           elsif bind.is_a?(Integer)
+             bind.to_i
+           else
+             bind.to_s
+           end
+         end
+
+         def raw_execute(sql, name, async: false, allow_retry: false, materialize_transactions: true)
+           @raw_connection.do(sql)
+         end
+
+         # Type cast value to handle ODBC::TimeStamp
+         def _type_cast_value(value)
+           if value.is_a?(ODBC::TimeStamp)
+             Time.parse(value.to_s)
+           else
+             value
+           end
+         end
+
+         # Assume the received identifier is in the DBMS's data dictionary case.
+         def format_case(identifier)
+           if ::SnowflakeOdbcAdapter::Metadata.instance.upcase_identifiers?
+             identifier =~ /[a-z]/ ? identifier : identifier.downcase
+           else
+             identifier
+           end
+         end
+       end
+     end
+   end
+ end
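The bind_params helper above rewrites $1-style placeholders with quoted values taken from the bind attributes. A minimal standalone sketch of that behavior, assuming the gem is on the load path; FakeBind is a hypothetical stand-in for an ActiveRecord bind attribute and is not part of the gem:

    require "active_record"
    require "active_record/connection_adapters/snowflake_odbc/database_statements"

    # Hypothetical stand-in for an ActiveRecord bind attribute (illustration only)
    FakeBind = Struct.new(:value_for_database)

    class BindDemo
      include ActiveRecord::ConnectionAdapters::SnowflakeOdbc::DatabaseStatements
    end

    sql   = "SELECT * FROM users WHERE name = $1 AND age = $2"
    binds = [ FakeBind.new("Ada"), FakeBind.new(36) ]
    puts BindDemo.new.bind_params(binds, sql)
    # => SELECT * FROM users WHERE name = 'Ada' AND age = '36'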
@@ -0,0 +1,42 @@
+ require "snowflake_odbc_adapter/metadata"
+
+ module ActiveRecord
+   module ConnectionAdapters
+     module SnowflakeOdbc
+       module Quoting # :nodoc:
+         extend ActiveSupport::Concern # :nodoc:
+         module ClassMethods # :nodoc:
+           # Returns a quoted form of the column name.
+           def quote_column_name(name)
+             name = name.to_s
+             quote_char = identifier_quote_char.to_s.strip
+             return name if quote_char.empty?
+
+             quote_char = quote_char[0]
+             # Avoid quoting any already quoted name
+             return name if name[0] == quote_char && name[-1] == quote_char
+
+             # If upcase identifiers, only quote mixed-case names.
+             return name if upcase_identifiers? && name !~ /([A-Z]+[a-z])|([a-z]+[A-Z])/
+
+             "#{quote_char.chr}#{name}#{quote_char.chr}"
+           end
+
+           def quote_table_name(name)
+             name
+           end
+
+           private
+
+           def identifier_quote_char
+             ::SnowflakeOdbcAdapter::Metadata.instance.identifier_quote_char
+           end
+
+           def upcase_identifiers?
+             ::SnowflakeOdbcAdapter::Metadata.instance.upcase_identifiers?
+           end
+         end
+       end
+     end
+   end
+ end
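With Snowflake the driver typically reports a double quote as the quote character and upcased identifiers, so only mixed-case names end up quoted. A rough standalone sketch of that rule; the stubbed metadata answers below are assumptions, not values read from a real connection:

    require "active_record"
    require "active_record/connection_adapters/snowflake_odbc/quoting"

    module QuotingDemo
      extend ActiveRecord::ConnectionAdapters::SnowflakeOdbc::Quoting::ClassMethods

      # Stubbed metadata answers (assumed typical Snowflake values)
      def self.identifier_quote_char = '"'
      def self.upcase_identifiers?   = true
    end

    QuotingDemo.quote_column_name("ID")         # => "ID" (already in data-dictionary case)
    QuotingDemo.quote_column_name("createdAt")  # => "\"createdAt\"" (mixed case gets quoted)
    QuotingDemo.quote_column_name('"ID"')       # => "\"ID\"" (already quoted, left alone)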
@@ -0,0 +1,133 @@
+ # frozen_string_literal: true
+
+ module ActiveRecord
+   module ConnectionAdapters # :nodoc:
+     module SnowflakeOdbc
+       module SchemaStatements # :nodoc:
+         # ODBC constants missing from Christian Werner's Ruby ODBC driver
+         SQL_NO_NULLS = 0
+         SQL_NULLABLE = 1
+         SQL_NULLABLE_UNKNOWN = 2
+
+         def data_sources
+           tables | views
+         end
+
+         # Returns a Hash of mappings from the abstract data types to the native
+         # database types. See TableDefinition#column for details on the recognized
+         # abstract data types.
+         def native_database_types
+           @native_database_types ||= ::SnowflakeOdbcAdapter::ColumnMetadata.new(self).native_database_types
+         end
+
+         # Returns an array of table names, for database tables visible on the
+         # current connection.
+         def tables(_name = nil)
+           stmt = @raw_connection.tables
+           result = stmt.fetch_all || []
+           stmt&.drop
+           result = ::SnowflakeOdbcAdapter::Snowflake.table_filter(result, @raw_connection)
+           result.each_with_object([]) do |row, table_names|
+             table_names << format_case(row[2])
+           end
+         end
+
+         # Returns an array of view names, for database views visible on the
+         # current connection.
+         def views(_name = nil)
+           stmt = @raw_connection.tables
+           result = stmt.fetch_all || []
+           stmt&.drop
+           result = ::SnowflakeOdbcAdapter::Snowflake.view_filter(result, @raw_connection)
+           result.each_with_object([]) do |row, table_names|
+             table_names << format_case(row[2])
+           end
+         end
+
+         # Checks to see if the table +table_name+ exists on the database.
+         #
+         #   table_exists?(:developers)
+         #
+         def table_exists?(table_name)
+           stmt = @raw_connection.tables(native_case(table_name.to_s))
+           result = stmt.fetch_all || []
+           stmt.drop
+           result.size.positive?
+         end
+
+         def column_definitions(table_name)
+           stmt = @raw_connection.columns(native_case(table_name.to_s))
+           result = stmt.fetch_all || []
+           stmt.drop
+           # SQLColumns can return technical columns; filter them out
+           ::SnowflakeOdbcAdapter::Snowflake.column_filters(result)
+         end
+
+         def new_column_from_field(table_name, field, _definitions)
+           col_name = field[3]
+           SnowflakeOdbc::Column.new(
+             format_case(col_name),           # SQLColumns: COLUMN_NAME
+             field[12],                       # SQLColumns: COLUMN_DEF
+             sql_type_metadata(field),
+             nullability(field[17], field[10])
+           )
+         end
+
+         def primary_key(table_name)
+           stmt = @raw_connection.primary_keys(native_case(table_name.to_s))
+           result = stmt.fetch_all || []
+           stmt&.drop
+           result[0] && format_case(result[0][3])
+         end
+
+         def rename_table(table_name, new_name, **options)
+           validate_table_length!(new_name) unless options[:_uses_legacy_table_name]
+           clear_cache!
+           schema_cache.clear_data_source_cache!(table_name.to_s)
+           schema_cache.clear_data_source_cache!(new_name.to_s)
+           execute "ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}"
+         end
+
+         # Renames a column in a table.
+         def rename_column(table_name, column_name, new_column_name) # :nodoc:
+           clear_cache!
+           execute("ALTER TABLE #{quote_table_name(table_name)} #{rename_column_sql(table_name, column_name, new_column_name)}")
+         end
+
+         private
+
+         def sql_type_metadata(col)
+           col_scale = col[8]       # SQLColumns: DECIMAL_DIGITS
+           col_sql_type = col[4]    # SQLColumns: DATA_TYPE
+           col_limit = col[6]       # SQLColumns: COLUMN_SIZE
+           args = { sql_type: col_sql_type, type: col_sql_type, limit: col_limit }
+           col_native_type = col[5] # SQLColumns: TYPE_NAME
+           args[:sql_type] = ::SnowflakeOdbcAdapter::Snowflake.type_mapper(col)
+           args[:sql_type] = "boolean" if col_native_type == "BOOLEAN"
+           args[:sql_type] = "json" if %w[VARIANT JSON STRUCT].include?(col_native_type)
+           args[:sql_type] = "date" if col_native_type == "DATE"
+           if [ ODBC::SQL_DECIMAL, ODBC::SQL_NUMERIC ].include?(col_sql_type)
+             args[:scale] = col_scale || 0
+             args[:precision] = col_limit
+           end
+           ActiveRecord::ConnectionAdapters::SqlTypeMetadata.new(**args)
+         end
+
+         def native_case(identifier)
+           if ::SnowflakeOdbcAdapter::Metadata.instance.upcase_identifiers?
+             identifier =~ /[A-Z]/ ? identifier : identifier.upcase
+           else
+             identifier
+           end
+         end
+
+         # Assume the column is nullable if nullable == SQL_NULLABLE_UNKNOWN
+         def nullability(is_nullable, nullable)
+           not_nullable = !is_nullable || !nullable.to_s.match("NO").nil?
+           !(not_nullable || nullable == SQL_NO_NULLS)
+         end
+       end
+     end
+   end
+ end
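These overrides back the regular ActiveRecord schema-introspection calls. An illustrative session, assuming a working Snowflake connection and a table called orders (the table name is hypothetical):

    conn = ActiveRecord::Base.connection
    conn.data_sources              # tables | views, filtered to the current database
    conn.table_exists?("orders")   # looked up in the DBMS's native (upcased) case
    conn.columns("orders").map { |c| [ c.name, c.sql_type, c.null ] }
    conn.primary_key("orders")     # first column reported by SQLPrimaryKeys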
@@ -0,0 +1,132 @@
+ # frozen_string_literal: true
+
+ require "active_record"
+ require "active_record/connection_adapters/abstract_adapter"
+ require "active_record/connection_adapters/snowflake_odbc/quoting"
+ require "active_record/connection_adapters/snowflake_odbc/database_statements"
+ require "active_record/connection_adapters/snowflake_odbc/schema_statements"
+ require "active_record/connection_adapters/snowflake_odbc/column"
+ require "snowflake_odbc_adapter/metadata"
+ require "snowflake_odbc_adapter/column_metadata"
+ require "odbc"
+ require "odbc_utf8"
+
+ module ActiveRecord
+   module ConnectionAdapters
+     class SnowflakeOdbcAdapter < AbstractAdapter
+       ADAPTER_NAME = "ODBC"
+
+       include SnowflakeOdbc::Quoting
+       include SnowflakeOdbc::DatabaseStatements
+       include SnowflakeOdbc::SchemaStatements
+
+       class << self
+         def new_client(config)
+           config = config.symbolize_keys
+           _, config = if config.key?(:dsn)
+                         dsn_connection(config)
+                       elsif config.key?(:conn_str)
+                         str_connection(config)
+                       else
+                         raise ArgumentError, "No data source name (:dsn) or connection string (:conn_str) specified."
+                       end
+         rescue ::ODBC::Error => error
+           # TODO: be more specific about which error to raise
+           raise ActiveRecord::ConnectionNotEstablished, error.message
+         end
+
+         def dbconsole(config, options = {})
+           raise NotImplementedError
+         end
+
+         private
+
+         def dsn_connection(config)
+           raise NotImplementedError
+         end
+
+         def str_connection(config)
+           attrs = config[:conn_str].split(";").map { |option| option.split("=", 2) }.to_h
+           odbc_module = attrs["ENCODING"] == "utf8" ? ODBC_UTF8 : ODBC
+           driver = odbc_module::Driver.new
+           driver.name = "odbc"
+           driver.attrs = attrs
+           connection = odbc_module::Database.new.drvconnect(driver)
+           # encoding_bug indicates that the driver is using non-ASCII data and is affected by
+           # the issue referenced here: https://github.com/larskanis/ruby-odbc/issues/2
+           [ connection, config.merge(driver: driver, encoding: attrs["ENCODING"], encoding_bug: attrs["ENCODING"] == "utf8") ]
+         end
+
+         def initialize_type_map(m)
+           super(m)
+           # Integer type codes are negated by Active Record
+           m.register_type (-1 * ODBC::SQL_TIMESTAMP), Type::DateTime.new
+           m.register_type "boolean", Type::Boolean.new
+           m.register_type "json", Type::Json.new
+           m.register_type (-1 * ODBC::SQL_CHAR), Type::String.new
+           m.register_type (-1 * ODBC::SQL_LONGVARCHAR), Type::Text.new
+           m.register_type (-1 * ODBC::SQL_TINYINT), Type::Integer.new(limit: 4)
+           m.register_type (-1 * ODBC::SQL_SMALLINT), Type::Integer.new(limit: 8)
+           m.register_type (-1 * ODBC::SQL_INTEGER), Type::Integer.new(limit: 16)
+           m.register_type (-1 * ODBC::SQL_BIGINT), Type::BigInteger.new(limit: 32)
+           m.register_type (-1 * ODBC::SQL_REAL), Type::Float.new(limit: 24)
+           m.register_type (-1 * ODBC::SQL_FLOAT), Type::Float.new
+           m.register_type (-1 * ODBC::SQL_DOUBLE), Type::Float.new(limit: 53)
+           m.register_type (-1 * ODBC::SQL_DECIMAL), Type::Float.new
+           m.register_type (-1 * ODBC::SQL_NUMERIC), Type::Integer.new
+           m.register_type (-1 * ODBC::SQL_BINARY), Type::Binary.new
+           m.register_type (-1 * ODBC::SQL_DATE), Type::Date.new
+           m.register_type (-1 * ODBC::SQL_DATETIME), Type::DateTime.new
+           m.register_type (-1 * ODBC::SQL_TIME), Type::Time.new
+           m.register_type (-1 * ODBC::SQL_TIMESTAMP), Type::DateTime.new
+           m.register_type (-1 * ODBC::SQL_GUID), Type::String.new
+
+           alias_type m, (-1 * ODBC::SQL_BIT), "boolean"
+           alias_type m, (-1 * ODBC::SQL_VARCHAR), (-1 * ODBC::SQL_CHAR)
+           alias_type m, (-1 * ODBC::SQL_WCHAR), (-1 * ODBC::SQL_CHAR)
+           alias_type m, (-1 * ODBC::SQL_WVARCHAR), (-1 * ODBC::SQL_CHAR)
+           alias_type m, (-1 * ODBC::SQL_WLONGVARCHAR), (-1 * ODBC::SQL_LONGVARCHAR)
+           alias_type m, (-1 * ODBC::SQL_VARBINARY), (-1 * ODBC::SQL_BINARY)
+           alias_type m, (-1 * ODBC::SQL_LONGVARBINARY), (-1 * ODBC::SQL_BINARY)
+           alias_type m, (-1 * ODBC::SQL_TYPE_DATE), (-1 * ODBC::SQL_DATE)
+           alias_type m, (-1 * ODBC::SQL_TYPE_TIME), (-1 * ODBC::SQL_TIME)
+           alias_type m, (-1 * ODBC::SQL_TYPE_TIMESTAMP), (-1 * ODBC::SQL_TIMESTAMP)
+         end
+
+         # Can't use the built-in ActiveRecord map#alias_type because it doesn't
+         # work with non-string keys, and in our case the keys are (almost) all
+         # numeric
+         def alias_type(map, new_type, old_type)
+           map.register_type(new_type) do |_, *args|
+             map.lookup(old_type, *args)
+           end
+         end
+       end
+
+       TYPE_MAP = Type::TypeMap.new.tap { |m| initialize_type_map(m) }
+
+       def initialize(...)
+         super
+         @raw_connection, @config = self.class.new_client(@config)
+         @raw_connection.use_time = true
+         ::SnowflakeOdbcAdapter::Metadata.instance.connection(@config, @raw_connection)
+       end
+
+       def supports_insert_returning?
+         false
+       end
+
+       def active?
+         @raw_connection.connected?
+       end
+
+       def reconnect
+         @raw_connection, @config = self.class.new_client(@config)
+       end
+
+       def disconnect!
+         @raw_connection.disconnect if @raw_connection.connected?
+       end
+     end
+     register "odbc", "ActiveRecord::ConnectionAdapters::SnowflakeOdbcAdapter", "active_record/connection_adapters/snowflake_odbc_adapter"
+   end
+ end
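The adapter registers itself under the "odbc" name and new_client expects either a :dsn or a :conn_str key; an ENCODING=utf8 attribute in the connection string switches to the odbc_utf8 bindings. An illustrative connection setup — the Snowflake driver keywords shown are assumptions, use whatever your ODBC driver configuration expects:

    require "snowflake_odbc_adapter"

    ActiveRecord::Base.establish_connection(
      adapter:  "odbc",
      conn_str: "Driver=SnowflakeDSIIDriver;Server=myaccount.snowflakecomputing.com;" \
                "UID=my_user;PWD=my_password;Database=MY_DB;Warehouse=MY_WH;ENCODING=utf8"
    )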
@@ -0,0 +1,83 @@
+ # frozen_string_literal: true
+
+ require "odbc"
+ require "snowflake_odbc_adapter/snowflake"
+
+ module SnowflakeOdbcAdapter
+   class ColumnMetadata # :nodoc:
+     GENERICS = {
+       primary_key: [ ODBC::SQL_INTEGER, ODBC::SQL_SMALLINT ],
+       string: [ ODBC::SQL_VARCHAR ],
+       text: [ ODBC::SQL_LONGVARCHAR, ODBC::SQL_VARCHAR ],
+       integer: [ ODBC::SQL_INTEGER, ODBC::SQL_SMALLINT ],
+       decimal: [ ODBC::SQL_NUMERIC, ODBC::SQL_DECIMAL ],
+       float: [ ODBC::SQL_DOUBLE, ODBC::SQL_REAL ],
+       datetime: [ ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP ],
+       timestamp: [ ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP ],
+       time: [ ODBC::SQL_TYPE_TIME, ODBC::SQL_TIME, ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP ],
+       date: [ ODBC::SQL_TYPE_DATE, ODBC::SQL_DATE, ODBC::SQL_TYPE_TIMESTAMP, ODBC::SQL_TIMESTAMP ],
+       binary: [ ODBC::SQL_LONGVARBINARY, ODBC::SQL_VARBINARY ],
+       boolean: [ ODBC::SQL_BIT, ODBC::SQL_TINYINT, ODBC::SQL_SMALLINT, ODBC::SQL_INTEGER ],
+       json: [ 2004 ]
+     }.freeze
+
+     attr_reader :adapter
+
+     def initialize(adapter)
+       @adapter = adapter
+     end
+
+     def native_database_types
+       grouped = reported_types.group_by { |row| row[1] }
+
+       GENERICS.each_with_object({}) do |(abstract, candidates), mapped|
+         candidates.detect do |candidate|
+           next unless grouped[candidate]
+
+           mapped[abstract] = native_type_mapping(abstract, grouped[candidate])
+         end
+       end
+     end
+
+     private
+
+     # Creates a Hash describing a mapping from an abstract type to a
+     # DBMS native type for use by #native_database_types
+     def native_type_mapping(abstract, rows)
+       # The appropriate SQL for :primary_key is hard to derive, as ODBC
+       # doesn't provide any info on a DBMS's native syntax for
+       # autoincrement columns, so we use a lookup instead.
+       return Snowflake::PRIMARY_KEY if abstract == :primary_key
+
+       selected_row = rows[0]
+
+       # If more than one native type corresponds to the SQL type we're
+       # handling, the type in the first descriptor should be the
+       # best match, because the ODBC specification states that
+       # SQLGetTypeInfo returns the results ordered by SQL type and then by
+       # how closely the native type maps to that SQL type.
+       # But for :text and :binary, select the native type with the
+       # largest capacity (compare SQLGetTypeInfo: COLUMN_SIZE values).
+       selected_row = rows.max_by { |row| row[2] } if %i[text binary].include?(abstract)
+       result = { name: selected_row[0] } # SQLGetTypeInfo: TYPE_NAME
+
+       create_params = selected_row[5]
+       # Depending on the column type, the CREATE_PARAMS keywords can
+       # include length, precision or scale.
+       if create_params && !create_params.strip.empty? && !%i[decimal json].include?(abstract)
+         result[:limit] = selected_row[2] # SQLGetTypeInfo: COLUMN_SIZE
+       end
+       result
+     end
+
+     def reported_types
+       @reported_types ||=
+         begin
+           stmt = adapter.raw_connection.types
+           stmt.fetch_all
+         ensure
+           stmt&.drop
+         end
+     end
+   end
+ end
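With a live connection, the resulting mapping is what ActiveRecord consults when generating migrations. A sketch of the expected shape — the exact TYPE_NAMEs and limits depend entirely on what the driver's SQLGetTypeInfo reports, so the :string and :boolean values below are only examples:

    types = ActiveRecord::Base.connection.native_database_types
    types[:primary_key]  # => "BIGINT UNIQUE PRIMARY KEY AUTOINCREMENT START 1 INCREMENT 1 ORDER "
    types[:string]       # => e.g. { name: "VARCHAR", limit: 16777216 } (driver-dependent)
    types[:boolean]      # => e.g. { name: "BOOLEAN" }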
@@ -0,0 +1,66 @@
+ # frozen_string_literal: true
+
+ require "singleton"
+
+ module SnowflakeOdbcAdapter
+   class Metadata # :nodoc:
+     include Singleton
+
+     FIELDS = %i[
+       SQL_DBMS_NAME
+       SQL_DBMS_VER
+       SQL_IDENTIFIER_CASE
+       SQL_QUOTED_IDENTIFIER_CASE
+       SQL_IDENTIFIER_QUOTE_CHAR
+       SQL_MAX_IDENTIFIER_LEN
+       SQL_MAX_TABLE_NAME_LEN
+       SQL_USER_NAME
+       SQL_DATABASE_NAME
+     ].freeze
+
+     attr_reader :identifier_quote_char
+
+     def initialize
+       @mutex = Mutex.new
+     end
+
+     FIELDS.each do |field|
+       define_method(field.to_s.downcase.gsub("sql_", "")) do
+         metadata[field]
+       end
+     end
+
+     def upcase_identifiers?
+       (identifier_case == ODBC::SQL_IC_UPPER)
+     end
+
+     def connection(config, connection)
+       unless @connection
+         with_mutex do
+           @connection = connection
+         end
+       end
+       @connection
+     end
+
+     private
+
+     def metadata
+       raise "Need to connect" unless @connection
+
+       unless @metadata
+         with_mutex do
+           @metadata = Hash[FIELDS.map do |field|
+             info = @connection.get_info(ODBC.const_get(field))
+             [ field, info ]
+           end]
+         end
+       end
+       @metadata
+     end
+
+     def with_mutex(&block)
+       @mutex.synchronize(&block)
+     end
+   end
+ end
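Each entry in FIELDS becomes a reader method with the SQL_ prefix stripped and the name downcased (dbms_name, identifier_case, and so on). A usage sketch once the adapter has handed over its raw connection; config and raw_odbc_connection are placeholders for whatever the adapter passes in:

    meta = SnowflakeOdbcAdapter::Metadata.instance
    meta.connection(config, raw_odbc_connection)  # memoizes the first raw connection

    meta.dbms_name              # SQL_DBMS_NAME as reported by the driver
    meta.identifier_quote_char  # SQL_IDENTIFIER_QUOTE_CHAR
    meta.upcase_identifiers?    # true when SQL_IDENTIFIER_CASE == ODBC::SQL_IC_UPPER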
@@ -0,0 +1,34 @@
+ # frozen_string_literal: true
+
+ module SnowflakeOdbcAdapter
+   # Snowflake-specific overrides
+   module Snowflake
+     PRIMARY_KEY = "BIGINT UNIQUE PRIMARY KEY AUTOINCREMENT START 1 INCREMENT 1 ORDER "
+     class << self
+       # Remove Snowflake-specific technical columns
+       def column_filters(columns)
+         columns.reject { |col| col[0] =~ /^snowflake$/i }.reject { |col| col[1] =~ /^account_usage$/i }
+       end
+
+       def type_mapper(col)
+         # ODBC-specific: treat DECIMAL(38, 0) as BIGINT
+         return "bigint" if col[4] == ODBC::SQL_DECIMAL && col[8] == 0 && col[6] == 38
+         col[4]
+       end
+
+       # Keep only tables from the current database
+       def table_filter(tables, connection)
+         database = connection.get_info(ODBC::SQL_DATABASE_NAME)
+         tables.select { |table| table[0] == database && table[3] =~ /^TABLE$/i }
+       end
+
+       # Keep only views from the current database
+       def view_filter(tables, connection)
+         database = connection.get_info(ODBC::SQL_DATABASE_NAME)
+         tables.select do |table|
+           table[0] == database && table[3] =~ /^VIEW$/i && table[1] !~ /^information_schema$/i
+         end
+       end
+     end
+   end
+ end
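type_mapper works on raw SQLColumns rows, where col[4] is DATA_TYPE, col[6] is COLUMN_SIZE and col[8] is DECIMAL_DIGITS. A minimal sketch of the DECIMAL(38, 0) special case, assuming the gem and ruby-odbc are installed:

    require "odbc"
    require "snowflake_odbc_adapter/snowflake"

    row = Array.new(18)
    row[4] = ODBC::SQL_DECIMAL
    row[6] = 38   # COLUMN_SIZE
    row[8] = 0    # DECIMAL_DIGITS

    SnowflakeOdbcAdapter::Snowflake.type_mapper(row)  # => "bigint"

    row[8] = 2
    SnowflakeOdbcAdapter::Snowflake.type_mapper(row)  # => ODBC::SQL_DECIMAL (falls through to the normal mapping)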
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ module SnowflakeOdbcAdapter
+   VERSION = "7.2.0"
+ end
@@ -0,0 +1,4 @@
+ # frozen_string_literal: true
+
+ require_relative "snowflake_odbc_adapter/version"
+ require "active_record/connection_adapters/snowflake_odbc_adapter"
@@ -0,0 +1,4 @@
+ module SnowflakeOdbcAdapter
+   VERSION: String
+   # See the writing guide of rbs: https://github.com/ruby/rbs#guides
+ end
@@ -0,0 +1,41 @@
+ # frozen_string_literal: true
+
+ require_relative "lib/snowflake_odbc_adapter/version"
+
+ Gem::Specification.new do |spec|
+   spec.name = "snowflake_odbc_adapter"
+   spec.version = SnowflakeOdbcAdapter::VERSION
+   spec.authors = [ "Guillaume GILLET" ]
+   spec.email = [ "guillaume.gillet@singlespot.com" ]
+
+   spec.summary = "ODBC ActiveRecord adapter designed for Snowflake"
+   spec.description = <<~TXT
+     The generic ODBC adapter (https://github.com/localytics/odbc_adapter) is no longer maintained and
+     does not follow Rails' evolution, so we created our own adapter.
+   TXT
+   spec.homepage = "https://github.com/singlespot/snowflake_odbc_adapter"
+   spec.license = "MIT"
+   spec.required_ruby_version = ">= 3.1.0"
+
+   spec.metadata["homepage_uri"] = spec.homepage
+   spec.metadata["source_code_uri"] = "https://github.com/GuillaumeGillet/snowflake_odbc_adapter"
+   spec.metadata["changelog_uri"] = "https://github.com/GuillaumeGillet/snowflake_odbc_adapter/CHANGELOG.md"
+
+   # Specify which files should be added to the gem when it is released.
+   # `git ls-files -z` lists the files in the RubyGem that have been added into git.
+   spec.files = Dir.chdir(__dir__) do
+     `git ls-files -z`.split("\x0").reject do |f|
+       (File.expand_path(f) == __FILE__) || f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor])
+     end
+   end
+   spec.bindir = "exe"
+   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+   spec.require_paths = [ "lib" ]
+
+   spec.add_dependency "activerecord", ">= 7.2"
+   spec.add_dependency "ruby-odbc"
+
+   # For more information and examples about making a new gem, check out our
+   # guide at: https://bundler.io/guides/creating_gem.html
+ end
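To use the released gem, add it to your Gemfile alongside a working ODBC setup (ruby-odbc needs a system ODBC driver manager plus the Snowflake ODBC driver installed):

    # Gemfile
    gem "snowflake_odbc_adapter", "~> 7.2"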