activerecord-jdbc-adapter 70.0.pre-java → 70.2-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Rakefile +1 -1
- data/activerecord-jdbc-adapter.gemspec +1 -3
- data/lib/arel/visitors/compat.rb +5 -33
- data/lib/arel/visitors/h2.rb +1 -13
- data/lib/arel/visitors/hsqldb.rb +1 -21
- data/lib/arel/visitors/sql_server.rb +2 -103
- data/lib/arjdbc/abstract/core.rb +8 -9
- data/lib/arjdbc/abstract/database_statements.rb +8 -0
- data/lib/arjdbc/discover.rb +0 -67
- data/lib/arjdbc/jdbc/adapter.rb +1 -1
- data/lib/arjdbc/jdbc/adapter_java.jar +0 -0
- data/lib/arjdbc/jdbc/column.rb +1 -26
- data/lib/arjdbc/jdbc.rb +0 -7
- data/lib/arjdbc/mysql/adapter.rb +2 -1
- data/lib/arjdbc/mysql/connection_methods.rb +43 -42
- data/lib/arjdbc/oracle/adapter.rb +4 -23
- data/lib/arjdbc/postgresql/adapter.rb +152 -3
- data/lib/arjdbc/postgresql/oid_types.rb +155 -108
- data/lib/arjdbc/sqlite3/adapter.rb +54 -36
- data/lib/arjdbc/tasks/database_tasks.rb +0 -15
- data/lib/arjdbc/util/serialized_attributes.rb +0 -22
- data/lib/arjdbc/util/table_copier.rb +2 -1
- data/lib/arjdbc/version.rb +1 -1
- data/rakelib/02-test.rake +3 -18
- data/src/java/arjdbc/jdbc/RubyJdbcConnection.java +1 -1
- data/src/java/arjdbc/postgresql/PostgreSQLRubyJdbcConnection.java +5 -0
- data/src/java/arjdbc/sqlite3/SQLite3RubyJdbcConnection.java +6 -1
- metadata +6 -39
- data/lib/active_record/connection_adapters/as400_adapter.rb +0 -2
- data/lib/active_record/connection_adapters/db2_adapter.rb +0 -1
- data/lib/active_record/connection_adapters/derby_adapter.rb +0 -1
- data/lib/active_record/connection_adapters/informix_adapter.rb +0 -1
- data/lib/arel/visitors/db2.rb +0 -137
- data/lib/arel/visitors/derby.rb +0 -112
- data/lib/arel/visitors/firebird.rb +0 -79
- data/lib/arjdbc/db2/adapter.rb +0 -808
- data/lib/arjdbc/db2/as400.rb +0 -142
- data/lib/arjdbc/db2/column.rb +0 -131
- data/lib/arjdbc/db2/connection_methods.rb +0 -48
- data/lib/arjdbc/db2.rb +0 -4
- data/lib/arjdbc/derby/active_record_patch.rb +0 -13
- data/lib/arjdbc/derby/adapter.rb +0 -521
- data/lib/arjdbc/derby/connection_methods.rb +0 -20
- data/lib/arjdbc/derby/schema_creation.rb +0 -15
- data/lib/arjdbc/derby.rb +0 -3
- data/lib/arjdbc/firebird/adapter.rb +0 -413
- data/lib/arjdbc/firebird/connection_methods.rb +0 -23
- data/lib/arjdbc/firebird.rb +0 -4
- data/lib/arjdbc/informix/adapter.rb +0 -139
- data/lib/arjdbc/informix/connection_methods.rb +0 -9
- data/lib/arjdbc/sybase/adapter.rb +0 -47
- data/lib/arjdbc/sybase.rb +0 -2
- data/lib/arjdbc/tasks/db2_database_tasks.rb +0 -104
- data/lib/arjdbc/tasks/derby_database_tasks.rb +0 -95
- data/src/java/arjdbc/derby/DerbyModule.java +0 -178
- data/src/java/arjdbc/derby/DerbyRubyJdbcConnection.java +0 -152
- data/src/java/arjdbc/firebird/FirebirdRubyJdbcConnection.java +0 -174
- data/src/java/arjdbc/informix/InformixRubyJdbcConnection.java +0 -75
@@ -40,7 +40,7 @@ module ArJdbc
     return if @@_initialized; @@_initialized = true
 
     require 'arjdbc/util/serialized_attributes'
-    Util::SerializedAttributes.setup
+    Util::SerializedAttributes.setup %r{LOB\(|LOB$}i, 'after_save_with_oracle_lob'
 
     unless ActiveRecord::ConnectionAdapters::AbstractAdapter.
       instance_methods(false).detect { |m| m.to_s == "prefetch_primary_key?" }
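The regexp passed to `Util::SerializedAttributes.setup` above selects Oracle LOB column types by their SQL type name, and the second argument names the after-save hook used to flush LOB values. A quick worked check of what the pattern matches (illustration only, not code from the gem):

```ruby
lob_types = %r{LOB\(|LOB$}i

%w[BLOB CLOB NCLOB VARCHAR2(30) NUMBER(10)].map { |t| [t, !!(t =~ lob_types)] }
# => [["BLOB", true], ["CLOB", true], ["NCLOB", true], ["VARCHAR2(30)", false], ["NUMBER(10)", false]]
```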
@@ -285,7 +285,7 @@ module ArJdbc
       execute "ALTER TABLE #{quote_table_name(table_name)} ADD CONSTRAINT #{quote_column_name(index_name)} #{index_type} (#{quoted_column_names})"
     end
   end
-  end
+  end
 
   # @private
   def add_index_options(table_name, column_name, options = {})
@@ -309,7 +309,7 @@ module ArJdbc
 
     quoted_column_names = column_names.map { |e| quote_column_name_or_expression(e) }.join(", ")
     [ index_name, index_type, quoted_column_names, tablespace, index_options ]
-  end
+  end
 
   # @override
   def remove_index(table_name, options = {})
@@ -327,12 +327,7 @@ module ArJdbc
     end
     execute "ALTER TABLE #{quote_table_name(table_name)} DROP CONSTRAINT #{quote_column_name(index_name)}" rescue nil
     execute "DROP INDEX #{quote_column_name(index_name)}"
-  end
-
-  # @private
-  def remove_index(table_name, options = {})
-    execute "DROP INDEX #{index_name(table_name, options)}"
-  end unless AR42
+  end
 
   def change_column_default(table_name, column_name, default)
     execute "ALTER TABLE #{quote_table_name(table_name)} MODIFY #{quote_column_name(column_name)} DEFAULT #{quote(default)}"
@@ -361,25 +356,11 @@ module ArJdbc
     "RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}"
   end
 
-  if ActiveRecord::VERSION::MAJOR >= 4
-
   # @override
   def remove_column(table_name, column_name, type = nil, options = {})
     do_remove_column(table_name, column_name)
   end
 
-  else
-
-  # @override
-  def remove_column(table_name, *column_names)
-    for column_name in column_names.flatten
-      do_remove_column(table_name, column_name)
-    end
-  end
-  alias remove_columns remove_column
-
-  end
-
   def do_remove_column(table_name, column_name)
     execute "ALTER TABLE #{quote_table_name(table_name)} DROP COLUMN #{quote_column_name(column_name)}"
   end
@@ -21,6 +21,7 @@ require 'arjdbc/abstract/transaction_support'
 require 'arjdbc/postgresql/base/array_decoder'
 require 'arjdbc/postgresql/base/array_encoder'
 require 'arjdbc/postgresql/name'
+require 'active_model'
 
 module ArJdbc
   # Strives to provide Rails built-in PostgreSQL adapter (API) compatibility.
@@ -320,6 +321,38 @@ module ArJdbc
   exec_query("SELECT extname FROM pg_extension", "SCHEMA").cast_values
 end
 
+# Returns a list of defined enum types, and their values.
+def enum_types
+  query = <<~SQL
+    SELECT
+      type.typname AS name,
+      string_agg(enum.enumlabel, ',' ORDER BY enum.enumsortorder) AS value
+    FROM pg_enum AS enum
+    JOIN pg_type AS type
+      ON (type.oid = enum.enumtypid)
+    GROUP BY type.typname;
+  SQL
+  exec_query(query, "SCHEMA").cast_values
+end
+
+# Given a name and an array of values, creates an enum type.
+def create_enum(name, values)
+  sql_values = values.map { |s| "'#{s}'" }.join(", ")
+  query = <<~SQL
+    DO $$
+    BEGIN
+      IF NOT EXISTS (
+        SELECT 1 FROM pg_type t
+        WHERE t.typname = '#{name}'
+      ) THEN
+        CREATE TYPE \"#{name}\" AS ENUM (#{sql_values});
+      END IF;
+    END
+    $$;
+  SQL
+  exec_query(query)
+end
+
 # Returns the configured supported identifier length supported by PostgreSQL
 def max_identifier_length
   @max_identifier_length ||= query_value("SHOW max_identifier_length", "SCHEMA").to_i
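For reference, a minimal sketch of how these two new helpers could be called from a migration; the migration class and enum values are hypothetical, only `create_enum` and `enum_types` come from the code added above:

```ruby
class AddMoodType < ActiveRecord::Migration[7.0]
  def up
    # Idempotent by construction: the DO $$ block above only creates the type
    # when no pg_type row with that name exists yet.
    connection.create_enum("mood", %w[sad ok happy])
  end

  def down
    execute "DROP TYPE IF EXISTS mood"
  end
end

# connection.enum_types then reports one row per enum type with its
# comma-joined labels, e.g. [["mood", "sad,ok,happy"]]
```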
@@ -672,6 +705,37 @@ module ActiveRecord::ConnectionAdapters
 class PostgreSQLAdapter < AbstractAdapter
   class_attribute :create_unlogged_tables, default: false
 
+  ##
+  # :singleton-method:
+  # PostgreSQL allows the creation of "unlogged" tables, which do not record
+  # data in the PostgreSQL Write-Ahead Log. This can make the tables faster,
+  # but significantly increases the risk of data loss if the database
+  # crashes. As a result, this should not be used in production
+  # environments. If you would like all created tables to be unlogged in
+  # the test environment you can add the following line to your test.rb
+  # file:
+  #
+  #   ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.create_unlogged_tables = true
+  class_attribute :create_unlogged_tables, default: false
+
+  ##
+  # :singleton-method:
+  # PostgreSQL supports multiple types for DateTimes. By default, if you use +datetime+
+  # in migrations, Rails will translate this to a PostgreSQL "timestamp without time zone".
+  # Change this in an initializer to use another NATIVE_DATABASE_TYPES. For example, to
+  # store DateTimes as "timestamp with time zone":
+  #
+  #   ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.datetime_type = :timestamptz
+  #
+  # Or if you are adding a custom type:
+  #
+  #   ActiveRecord::ConnectionAdapters::PostgreSQLAdapter::NATIVE_DATABASE_TYPES[:my_custom_type] = { name: "my_custom_type_name" }
+  #   ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.datetime_type = :my_custom_type
+  #
+  # If you're using +:ruby+ as your +config.active_record.schema_format+ and you change this
+  # setting, you should immediately run <tt>bin/rails db:migrate</tt> to update the types in your schema.rb.
+  class_attribute :datetime_type, default: :timestamp
+
   # Try to use as much of the built in postgres logic as possible
   # maybe someday we can extend the actual adapter
   include ActiveRecord::ConnectionAdapters::PostgreSQL::ReferentialIntegrity
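Both class attributes documented above are ordinary Rails settings, so per the doc comments they would typically be flipped from application config, for example:

```ruby
# config/environments/test.rb: create test tables without WAL logging
ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.create_unlogged_tables = true

# config/initializers/active_record.rb: map +datetime+ columns to timestamptz
ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.datetime_type = :timestamptz
```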
@@ -735,11 +799,96 @@ module ActiveRecord::ConnectionAdapters
 
 private
 
-
-
+FEATURE_NOT_SUPPORTED = "0A000" # :nodoc:
+
+def execute_and_clear(sql, name, binds, prepare: false, async: false)
+  sql = transform_query(sql)
+  check_if_write_query(sql)
+
+  if !prepare || without_prepared_statement?(binds)
+    result = exec_no_cache(sql, name, binds, async: async)
+  else
+    result = exec_cache(sql, name, binds, async: async)
+  end
+  begin
+    ret = yield result
+  ensure
+    # Is this really result in AR PG?
+    # result.clear
+  end
+  ret
+end
+
+def exec_no_cache(sql, name, binds, async: false)
+  materialize_transactions
+  mark_transaction_written_if_write(sql)
+
+  # make sure we carry over any changes to ActiveRecord.default_timezone that have been
+  # made since we established the connection
+  update_typemap_for_default_timezone
+
+  type_casted_binds = type_casted_binds(binds)
+  log(sql, name, binds, type_casted_binds, async: async) do
+    ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+      @connection.exec_params(sql, type_casted_binds)
+    end
+  end
+end
+
+def exec_cache(sql, name, binds, async: false)
+  materialize_transactions
+  mark_transaction_written_if_write(sql)
+  update_typemap_for_default_timezone
+
+  stmt_key = prepare_statement(sql, binds)
+  type_casted_binds = type_casted_binds(binds)
+
+  log(sql, name, binds, type_casted_binds, stmt_key, async: async) do
+    ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
+      @connection.exec_prepared(stmt_key, type_casted_binds)
+    end
+  end
+rescue ActiveRecord::StatementInvalid => e
+  raise unless is_cached_plan_failure?(e)
+
+  # Nothing we can do if we are in a transaction because all commands
+  # will raise InFailedSQLTransaction
+  if in_transaction?
+    raise ActiveRecord::PreparedStatementCacheExpired.new(e.cause.message)
+  else
+    @lock.synchronize do
+      # outside of transactions we can simply flush this query and retry
+      @statements.delete sql_key(sql)
+    end
+    retry
+  end
+end
+
+# Annoyingly, the code for prepared statements whose return value may
+# have changed is FEATURE_NOT_SUPPORTED.
+#
+# This covers various different error types so we need to do additional
+# work to classify the exception definitively as a
+# ActiveRecord::PreparedStatementCacheExpired
+#
+# Check here for more details:
+# https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/cache/plancache.c#l573
+def is_cached_plan_failure?(e)
+  pgerror = e.cause
+  pgerror.result.result_error_field(PG::PG_DIAG_SQLSTATE) == FEATURE_NOT_SUPPORTED &&
+    pgerror.result.result_error_field(PG::PG_DIAG_SOURCE_FUNCTION) == "RevalidateCachedQuery"
+rescue
+  false
+end
+
+def in_transaction?
+  open_transactions > 0
+end
+
+# Returns the statement identifier for the client side cache
+# of statements
 def sql_key(sql)
   "#{schema_search_path}-#{sql}"
 end
-
 end
 end
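Which of the two branches in `execute_and_clear` runs is governed by the standard `prepared_statements` connection option (checked via `without_prepared_statement?`). A sketch of a connection config that forces the `exec_no_cache` path; the hash keys are the usual Active Record ones and the database name is made up:

```ruby
ActiveRecord::Base.establish_connection(
  adapter:             "postgresql",  # arjdbc registers under the stock adapter name on JRuby
  database:            "myapp_test",
  prepared_statements: false          # execute_and_clear then always takes exec_no_cache
)
```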
@@ -1,5 +1,4 @@
 # frozen_string_literal: true
-
 require 'thread'
 
 module ArJdbc
@@ -91,8 +90,23 @@ module ArJdbc
 end
 
 def get_oid_type(oid, fmod, column_name, sql_type = '') # :nodoc:
-
-
+  # Note: type_map is storing a bunch of oid type prefixed with a namespace even
+  # if they are not namespaced (e.g. ""."oidvector"). builtin types which are
+  # common seem to not be prefixed (e.g. "varchar"). OID numbers are also keys
+  # but JDBC never returns those. So the current scheme is to check with
+  # what we got and that covers number and plain strings and otherwise we will
+  # wrap with the namespace form.
+  found = type_map.key?(oid)
+
+  if !found
+    key = oid.kind_of?(String) && oid != "oid" ? "\"\".\"#{oid}\"" : oid
+    found = type_map.key?(key)
+
+    if !found
+      load_additional_types([oid])
+    else
+      oid = key
+    end
   end
 
   type_map.fetch(oid, fmod, sql_type) {
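A worked example of the fallback lookup added above: a plain string type name other than "oid" is retried under the namespaced key form the type map uses for non-builtin types.

```ruby
oid = "oidvector"
key = oid.kind_of?(String) && oid != "oid" ? "\"\".\"#{oid}\"" : oid
key  # => "\"\".\"oidvector\"", i.e. the ""."oidvector" form mentioned in the comment
```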
@@ -103,132 +117,165 @@ module ArJdbc
   }
 end
 
+def reload_type_map
+  type_map.clear
+  initialize_type_map
+end
+
+def initialize_type_map_inner(m)
+  m.register_type "int2", Type::Integer.new(limit: 2)
+  m.register_type "int4", Type::Integer.new(limit: 4)
+  m.register_type "int8", Type::Integer.new(limit: 8)
+  m.register_type "oid", OID::Oid.new
+  m.register_type "float4", Type::Float.new
+  m.alias_type "float8", "float4"
+  m.register_type "text", Type::Text.new
+  register_class_with_limit m, "varchar", Type::String
+  m.alias_type "char", "varchar"
+  m.alias_type "name", "varchar"
+  m.alias_type "bpchar", "varchar"
+  m.register_type "bool", Type::Boolean.new
+  register_class_with_limit m, "bit", OID::Bit
+  register_class_with_limit m, "varbit", OID::BitVarying
+  m.register_type "date", OID::Date.new
+
+  m.register_type "money", OID::Money.new
+  m.register_type "bytea", OID::Bytea.new
+  m.register_type "point", OID::Point.new
+  m.register_type "hstore", OID::Hstore.new
+  m.register_type "json", Type::Json.new
+  m.register_type "jsonb", OID::Jsonb.new
+  m.register_type "cidr", OID::Cidr.new
+  m.register_type "inet", OID::Inet.new
+  m.register_type "uuid", OID::Uuid.new
+  m.register_type "xml", OID::Xml.new
+  m.register_type "tsvector", OID::SpecializedString.new(:tsvector)
+  m.register_type "macaddr", OID::Macaddr.new
+  m.register_type "citext", OID::SpecializedString.new(:citext)
+  m.register_type "ltree", OID::SpecializedString.new(:ltree)
+  m.register_type "line", OID::SpecializedString.new(:line)
+  m.register_type "lseg", OID::SpecializedString.new(:lseg)
+  m.register_type "box", OID::SpecializedString.new(:box)
+  m.register_type "path", OID::SpecializedString.new(:path)
+  m.register_type "polygon", OID::SpecializedString.new(:polygon)
+  m.register_type "circle", OID::SpecializedString.new(:circle)
+  m.register_type "regproc", OID::Enum.new
+  # FIXME: adding this vector type leads to quoting not handlign Array data in quoting.
+  #m.register_type "_int4", OID::Vector.new(",", m.lookup("int4"))
+  register_class_with_precision m, "time", Type::Time
+  register_class_with_precision m, "timestamp", OID::Timestamp
+  register_class_with_precision m, "timestamptz", OID::TimestampWithTimeZone
+
+  m.register_type "numeric" do |_, fmod, sql_type|
+    precision = extract_precision(sql_type)
+    scale = extract_scale(sql_type)
+
+    # The type for the numeric depends on the width of the field,
+    # so we'll do something special here.
+    #
+    # When dealing with decimal columns:
+    #
+    # places after decimal = fmod - 4 & 0xffff
+    # places before decimal = (fmod - 4) >> 16 & 0xffff
+    if fmod && (fmod - 4 & 0xffff).zero?
+      # FIXME: Remove this class, and the second argument to
+      # lookups on PG
+      Type::DecimalWithoutScale.new(precision: precision)
+    else
+      OID::Decimal.new(precision: precision, scale: scale)
+    end
+  end
+
+  m.register_type "interval" do |*args, sql_type|
+    precision = extract_precision(sql_type)
+    OID::Interval.new(precision: precision)
+  end
+
+  # pgjdbc returns these if the column is auto-incrmenting
+  m.alias_type 'serial', 'int4'
+  m.alias_type 'bigserial', 'int8'
+end
+
+
+# We differ from AR here because we will initialize type_map when adapter initializes
 def type_map
   @type_map
 end
 
-def
-
-
-  initialize_type_map(@type_map)
-end
+def initialize_type_map(m = type_map)
+  initialize_type_map_inner(m)
+  load_additional_types
 end
 
 private
 
-def
-register_class_with_limit
-
-
-
-
-
-
-
-
-
-
-
-register_class_with_limit m, 'varbit', OID::BitVarying
-m.alias_type 'timestamptz', 'timestamp'
-m.register_type 'date', OID::Date.new
-
-m.register_type 'money', OID::Money.new
-m.register_type 'bytea', OID::Bytea.new
-m.register_type 'point', OID::Point.new
-m.register_type 'hstore', OID::Hstore.new
-m.register_type 'json', Type::Json.new
-m.register_type 'jsonb', OID::Jsonb.new
-m.register_type 'cidr', OID::Cidr.new
-m.register_type 'inet', OID::Inet.new
-m.register_type 'uuid', OID::Uuid.new
-m.register_type 'xml', OID::Xml.new
-m.register_type 'tsvector', OID::SpecializedString.new(:tsvector)
-m.register_type 'macaddr', OID::Macaddr.new
-m.register_type 'citext', OID::SpecializedString.new(:citext)
-m.register_type 'ltree', OID::SpecializedString.new(:ltree)
-m.register_type 'line', OID::SpecializedString.new(:line)
-m.register_type 'lseg', OID::SpecializedString.new(:lseg)
-m.register_type 'box', OID::SpecializedString.new(:box)
-m.register_type 'path', OID::SpecializedString.new(:path)
-m.register_type 'polygon', OID::SpecializedString.new(:polygon)
-m.register_type 'circle', OID::SpecializedString.new(:circle)
-
-m.register_type 'interval' do |*args, sql_type|
-  precision = extract_precision(sql_type)
-  OID::Interval.new(precision: precision)
+def register_class_with_limit(...)
+  ::ActiveRecord::ConnectionAdapters::AbstractAdapter.send(:register_class_with_limit, ...)
+end
+
+def register_class_with_precision(...)
+  ::ActiveRecord::ConnectionAdapters::AbstractAdapter.send(:register_class_with_precision, ...)
+end
+
+def load_additional_types(oids = nil) # :nodoc:
+  initializer = ArjdbcTypeMapInitializer.new(type_map)
+  load_types_queries(initializer, oids) do |query|
+    execute_and_clear(query, "SCHEMA", []) do |records|
+      initializer.run(records)
+    end
   end
+end
 
-
-
-
-
-
-
-
-
-
-#
-# When dealing with decimal columns:
-#
-# places after decimal = fmod - 4 & 0xffff
-# places before decimal = (fmod - 4) >> 16 & 0xffff
-if fmod && (fmod - 4 & 0xffff).zero?
-  # FIXME: Remove this class, and the second argument to
-  # lookups on PG
-  Type::DecimalWithoutScale.new(precision: precision)
+def load_types_queries(initializer, oids)
+  query = <<~SQL
+    SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, r.rngsubtype, t.typtype, t.typbasetype
+    FROM pg_type as t
+    LEFT JOIN pg_range as r ON oid = rngtypid
+  SQL
+  if oids
+    if oids.all? { |e| e.kind_of? Numeric }
+      yield query + "WHERE t.oid IN (%s)" % oids.join(", ")
     else
-
+      in_list = oids.map { |e| %Q{'#{e}'} }.join(", ")
+      yield query + "WHERE t.typname IN (%s)" % in_list
     end
+  else
+    yield query + initializer.query_conditions_for_known_type_names
+    yield query + initializer.query_conditions_for_known_type_types
+    yield query + initializer.query_conditions_for_array_types
   end
-
-load_additional_types(m)
-
-# pgjdbc returns these if the column is auto-incrmenting
-m.alias_type 'serial', 'int4'
-m.alias_type 'bigserial', 'int8'
 end
 
-def
-
+def update_typemap_for_default_timezone
+  if @default_timezone != ActiveRecord.default_timezone && @timestamp_decoder
+    decoder_class = ActiveRecord.default_timezone == :utc ?
+      PG::TextDecoder::TimestampUtc :
+      PG::TextDecoder::TimestampWithoutTimeZone
 
-
-
-SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, r.rngsubtype, t.typtype, t.typbasetype,
-ns.nspname, ns.nspname = ANY(current_schemas(true)) in_ns
-FROM pg_type as t
-LEFT JOIN pg_range as r ON oid = rngtypid
-JOIN pg_namespace AS ns ON t.typnamespace = ns.oid
-SQL
-else
-query = <<-SQL
-SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, t.typtype, t.typbasetype,
-ns.nspname, ns.nspname = ANY(current_schemas(true)) in_ns
-FROM pg_type as t
-JOIN pg_namespace AS ns ON t.typnamespace = ns.oid
-SQL
-end
+    @timestamp_decoder = decoder_class.new(@timestamp_decoder.to_h)
+    @connection.type_map_for_results.add_coder(@timestamp_decoder)
 
-
-if oid.is_a? Numeric || oid.match(/^\d+$/)
-  # numeric OID
-  query += "WHERE t.oid = %s" % oid
+    @default_timezone = ActiveRecord.default_timezone
 
-
-
-
+    # if default timezone has changed, we need to reconfigure the connection
+    # (specifically, the session time zone)
+    configure_connection
+  end
+end
 
-
-
-
-
-else
-query += initializer.query_conditions_for_initial_load
+def extract_scale(sql_type)
+  case sql_type
+  when /\((\d+)\)/ then 0
+  when /\((\d+)(,(\d+))\)/ then $3.to_i
   end
+end
+
+def extract_precision(sql_type)
+  $1.to_i if sql_type =~ /\((\d+)(,\d+)?\)/
+end
 
-
-
+def extract_limit(sql_type)
+  $1.to_i if sql_type =~ /\((.*)\)/
 end
 
 # Support arrays/ranges for defining attributes that don't exist in the db