activerecord-jdbc-adapter 5.0.pre1 → 51.0
- checksums.yaml +5 -5
- data/.gitignore +1 -2
- data/.travis.yml +15 -416
- data/Gemfile +35 -37
- data/README.md +23 -118
- data/RUNNING_TESTS.md +31 -26
- data/Rakefile +2 -3
- data/activerecord-jdbc-adapter.gemspec +1 -2
- data/lib/arjdbc/abstract/connection_management.rb +21 -0
- data/lib/arjdbc/abstract/core.rb +62 -0
- data/lib/arjdbc/abstract/database_statements.rb +46 -0
- data/lib/arjdbc/abstract/statement_cache.rb +58 -0
- data/lib/arjdbc/abstract/transaction_support.rb +86 -0
- data/lib/arjdbc/derby/adapter.rb +6 -1
- data/lib/arjdbc/discover.rb +0 -7
- data/lib/arjdbc/firebird/adapter.rb +2 -2
- data/lib/arjdbc/jdbc/adapter.rb +10 -252
- data/lib/arjdbc/jdbc/adapter_java.jar +0 -0
- data/lib/arjdbc/jdbc/connection.rb +6 -0
- data/lib/arjdbc/jdbc.rb +2 -2
- data/lib/arjdbc/mysql/adapter.rb +87 -944
- data/lib/arjdbc/mysql/connection_methods.rb +4 -2
- data/lib/arjdbc/postgresql/adapter.rb +288 -1023
- data/lib/arjdbc/postgresql/base/array_decoder.rb +26 -0
- data/lib/arjdbc/postgresql/base/array_encoder.rb +25 -0
- data/lib/arjdbc/postgresql/base/pgconn.rb +8 -5
- data/lib/arjdbc/postgresql/column.rb +10 -599
- data/lib/arjdbc/postgresql/connection_methods.rb +9 -0
- data/lib/arjdbc/postgresql/name.rb +24 -0
- data/lib/arjdbc/postgresql/oid_types.rb +25 -110
- data/lib/arjdbc/sqlite3/adapter.rb +171 -170
- data/lib/arjdbc/tasks/database_tasks.rb +1 -3
- data/lib/arjdbc/tasks/db2_database_tasks.rb +2 -2
- data/lib/arjdbc/version.rb +1 -1
- data/pom.xml +3 -3
- data/rakelib/02-test.rake +0 -12
- data/rakelib/compile.rake +1 -1
- data/rakelib/db.rake +7 -5
- data/rakelib/rails.rake +63 -64
- data/src/java/arjdbc/firebird/FirebirdRubyJdbcConnection.java +1 -17
- data/src/java/arjdbc/jdbc/RubyJdbcConnection.java +518 -1260
- data/src/java/arjdbc/mysql/MySQLModule.java +3 -3
- data/src/java/arjdbc/mysql/MySQLRubyJdbcConnection.java +53 -134
- data/src/java/arjdbc/postgresql/PostgreSQLRubyJdbcConnection.java +214 -240
- data/src/java/arjdbc/sqlite3/SQLite3Module.java +0 -20
- data/src/java/arjdbc/sqlite3/SQLite3RubyJdbcConnection.java +85 -10
- metadata +20 -34
- data/Appraisals +0 -41
- data/lib/active_record/connection_adapters/oracle_adapter.rb +0 -1
- data/lib/arjdbc/common_jdbc_methods.rb +0 -89
- data/lib/arjdbc/mysql/bulk_change_table.rb +0 -150
- data/lib/arjdbc/mysql/column.rb +0 -162
- data/lib/arjdbc/mysql/explain_support.rb +0 -82
- data/lib/arjdbc/mysql/schema_creation.rb +0 -58
- data/lib/arjdbc/oracle/adapter.rb +0 -952
- data/lib/arjdbc/oracle/column.rb +0 -126
- data/lib/arjdbc/oracle/connection_methods.rb +0 -21
- data/lib/arjdbc/oracle.rb +0 -4
- data/lib/arjdbc/postgresql/_bc_time_cast_patch.rb +0 -21
- data/lib/arjdbc/postgresql/base/oid.rb +0 -412
- data/lib/arjdbc/postgresql/base/schema_definitions.rb +0 -131
- data/lib/arjdbc/postgresql/explain_support.rb +0 -53
- data/lib/arjdbc/postgresql/oid/bytea.rb +0 -2
- data/lib/arjdbc/postgresql/schema_creation.rb +0 -60
- data/lib/arjdbc/tasks/oracle/enhanced_structure_dump.rb +0 -297
- data/lib/arjdbc/tasks/oracle_database_tasks.rb +0 -65
- data/src/java/arjdbc/oracle/OracleModule.java +0 -75
- data/src/java/arjdbc/oracle/OracleRubyJdbcConnection.java +0 -465
--- data/lib/arjdbc/postgresql/adapter.rb
+++ data/lib/arjdbc/postgresql/adapter.rb
@@ -2,30 +2,39 @@
 ArJdbc.load_java_part :PostgreSQL

 require 'ipaddr'
+require 'active_record/connection_adapters/abstract_adapter'
+require 'active_record/connection_adapters/postgresql/column'
+require 'active_record/connection_adapters/postgresql/explain_pretty_printer'
+require 'active_record/connection_adapters/postgresql/quoting'
+require 'active_record/connection_adapters/postgresql/referential_integrity'
+require 'active_record/connection_adapters/postgresql/schema_creation'
+require 'active_record/connection_adapters/postgresql/schema_dumper'
+require 'active_record/connection_adapters/postgresql/schema_statements'
+require 'active_record/connection_adapters/postgresql/type_metadata'
+require 'active_record/connection_adapters/postgresql/utils'
+require 'arjdbc/abstract/core'
+require 'arjdbc/abstract/connection_management'
+require 'arjdbc/abstract/database_statements'
+require 'arjdbc/abstract/statement_cache'
+require 'arjdbc/abstract/transaction_support'
+require 'arjdbc/postgresql/base/array_decoder'
+require 'arjdbc/postgresql/base/array_encoder'
+require 'arjdbc/postgresql/name'

 module ArJdbc
 # Strives to provide Rails built-in PostgreSQL adapter (API) compatibility.
 module PostgreSQL

-# @deprecated no longer used
-# @private
-AR4_COMPAT = AR40
-# @deprecated no longer used
-# @private
-AR42_COMPAT = AR42
-
 require 'arjdbc/postgresql/column'
-require 'arjdbc/postgresql/explain_support'
-require 'arjdbc/postgresql/schema_creation' # AR 4.x
 require 'arel/visitors/postgresql_jdbc'
 # @private
 IndexDefinition = ::ActiveRecord::ConnectionAdapters::IndexDefinition

 # @private
-ForeignKeyDefinition = ::ActiveRecord::ConnectionAdapters::ForeignKeyDefinition
+ForeignKeyDefinition = ::ActiveRecord::ConnectionAdapters::ForeignKeyDefinition

 # @private
-Type = ::ActiveRecord::Type
+Type = ::ActiveRecord::Type

 # @see ActiveRecord::ConnectionAdapters::JdbcAdapter#jdbc_connection_class
 def self.jdbc_connection_class
@@ -130,172 +139,60 @@ module ArJdbc
 # @private
 ActiveRecordError = ::ActiveRecord::ActiveRecordError

-# Maps logical Rails types to PostgreSQL-specific data types.
-def type_to_sql(type, limit = nil, precision = nil, scale = nil)
-case type.to_s
-when 'binary'
-# PostgreSQL doesn't support limits on binary (bytea) columns.
-# The hard limit is 1Gb, because of a 32-bit size field, and TOAST.
-case limit
-when nil, 0..0x3fffffff; super(type)
-else raise(ActiveRecordError, "No binary type has byte size #{limit}.")
-end
-when 'text'
-# PostgreSQL doesn't support limits on text columns.
-# The hard limit is 1Gb, according to section 8.3 in the manual.
-case limit
-when nil, 0..0x3fffffff; super(type)
-else raise(ActiveRecordError, "The limit on text can be at most 1GB - 1byte.")
-end
-when 'integer'
-return 'integer' unless limit
-
-case limit
-when 1, 2; 'smallint'
-when 3, 4; 'integer'
-when 5..8; 'bigint'
-else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a numeric with precision 0 instead.")
-end
-when 'datetime'
-return super unless precision
-
-case precision
-when 0..6; "timestamp(#{precision})"
-else raise(ActiveRecordError, "No timestamp type has precision of #{precision}. The allowed range of precision is from 0 to 6")
-end
-else
-super
-end
-end
-
-def type_cast(value, column, array_member = false)
-return super(value, nil) unless column
-
-case value
-when String
-return super(value, column) unless 'bytea' == column.sql_type
-value # { :value => value, :format => 1 }
-when Array
-case column.sql_type
-when 'point'
-jdbc_column_class.point_to_string(value)
-when 'json', 'jsonb'
-jdbc_column_class.json_to_string(value)
-else
-return super(value, column) unless column.array?
-jdbc_column_class.array_to_string(value, column, self)
-end
-when NilClass
-if column.array? && array_member
-'NULL'
-elsif column.array?
-value
-else
-super(value, column)
-end
-when Hash
-case column.sql_type
-when 'hstore'
-jdbc_column_class.hstore_to_string(value, array_member)
-when 'json', 'jsonb'
-jdbc_column_class.json_to_string(value)
-else super(value, column)
-end
-when IPAddr
-return super unless column.sql_type == 'inet' || column.sql_type == 'cidr'
-jdbc_column_class.cidr_to_string(value)
-when Range
-return super(value, column) unless /range$/ =~ column.sql_type
-jdbc_column_class.range_to_string(value)
-else
-super(value, column)
-end
-end if AR40 && ! AR42
-
-# @private
-def _type_cast(value)
-case value
-when Type::Binary::Data
-# Return a bind param hash with format as binary.
-# See http://deveiate.org/code/pg/PGconn.html#method-i-exec_prepared-doc
-# for more information
-{ :value => value.to_s, :format => 1 }
-when OID::Xml::Data, OID::Bit::Data
-value.to_s
-else
-super
-end
-end if AR42
-private :_type_cast if AR42
-
 NATIVE_DATABASE_TYPES = {
-:
-:
-:
-:
-:
-:
-:
-:
-:
-:
-:
-:
-:
-:
-#
-
-
-:
-:
-:
+primary_key: 'bigserial primary key',
+bigint: { name: 'bigint' },
+binary: { name: 'bytea' },
+bit: { name: 'bit' },
+bit_varying: { name: 'bit varying' },
+boolean: { name: 'boolean' },
+box: { name: 'box' },
+char: { name: 'char' },
+cidr: { name: 'cidr' },
+circle: { name: 'circle' },
+citext: { name: 'citext' },
+date: { name: 'date' },
+daterange: { name: 'daterange' },
+datetime: { name: 'timestamp' },
+decimal: { name: 'decimal' }, # :limit => 1000
+float: { name: 'float' },
+hstore: { name: 'hstore' },
+inet: { name: 'inet' },
+int4range: { name: 'int4range' },
+int8range: { name: 'int8range' },
+integer: { name: 'integer' },
+interval: { name: 'interval' }, # This doesn't get added to AR's postgres adapter until 5.1 but it fixes broken tests in 5.0 ...
+json: { name: 'json' },
+jsonb: { name: 'jsonb' },
+line: { name: 'line' },
+lseg: { name: 'lseg' },
+ltree: { name: 'ltree' },
+macaddr: { name: 'macaddr' },
+money: { name: 'money' },
+numeric: { name: 'numeric' },
+numrange: { name: 'numrange' },
+oid: { name: 'oid' },
+path: { name: 'path' },
+point: { name: 'point' },
+polygon: { name: 'polygon' },
+string: { name: 'character varying' },
+text: { name: 'text' },
+time: { name: 'time' },
+timestamp: { name: 'timestamp' },
+tsrange: { name: 'tsrange' },
+tstzrange: { name: 'tstzrange' },
+tsvector: { name: 'tsvector' },
+uuid: { name: 'uuid' },
+xml: { name: 'xml' }
 }

-NATIVE_DATABASE_TYPES.update({
-:tsvector => { :name => "tsvector" },
-:hstore => { :name => "hstore" },
-:inet => { :name => "inet" },
-:cidr => { :name => "cidr" },
-:macaddr => { :name => "macaddr" },
-:uuid => { :name => "uuid" },
-:json => { :name => "json" },
-:jsonb => { :name => "jsonb" },
-:ltree => { :name => "ltree" },
-# ranges :
-:daterange => { :name => "daterange" },
-:numrange => { :name => "numrange" },
-:tsrange => { :name => "tsrange" },
-:tstzrange => { :name => "tstzrange" },
-:int4range => { :name => "int4range" },
-:int8range => { :name => "int8range" },
-}) if AR40
-
-NATIVE_DATABASE_TYPES.update(
-:string => { :name => "character varying" },
-:bigserial => "bigserial",
-:bigint => { :name => "bigint" },
-:bit => { :name => "bit" },
-:bit_varying => { :name => "bit varying" }
-) if AR42
-
 def native_database_types
 NATIVE_DATABASE_TYPES
 end

-
-
-
-spec = super
-spec[:array] = 'true' if column.respond_to?(:array) && column.array
-spec[:default] = "\"#{column.default_function}\"" if column.default_function
-spec
-end if AR40
-
-# Adds `:array` as a valid migration key.
-# @override
-def migration_keys
-super + [:array]
-end if AR40
+def valid_type?(type)
+!native_database_types[type].nil?
+end

 # Enable standard-conforming strings if available.
 def set_standard_conforming_strings
@@ -333,15 +230,21 @@ module ArJdbc
 @standard_conforming_strings == true # return false if :unsupported
 end

-
-def supports_migrations?
-true
-end
+def supports_ddl_transactions?; true end

-
-
-
-
+def supports_explain?; true end
+
+def supports_expression_index?; true end
+
+def supports_index_sort_order?; true end
+
+def supports_partial_index?; true end
+
+def supports_savepoints?; true end
+
+def supports_transaction_isolation?(level = nil); true end
+
+def supports_views?; true end

 # Does PostgreSQL support standard conforming strings?
 def supports_standard_conforming_strings?
@@ -357,56 +260,24 @@ module ArJdbc
 postgresql_version >= 80200
 end

-def
-
-
-
-def supports_index_sort_order?; true end
-
-def supports_partial_index?; true end if AR40
+def supports_pgcrypto_uuid?
+postgresql_version >= 90400
+end

 # Range data-types weren't introduced until PostgreSQL 9.2.
 def supports_ranges?
 postgresql_version >= 90200
-end if AR40
-
-def supports_transaction_isolation?(level = nil)
-true
-end
-
-# @override
-def supports_views?; true end
-
-if ArJdbc::AR50
-def views
-select_values("SELECT table_name FROM INFORMATION_SCHEMA.views WHERE table_schema = ANY (current_schemas(false))")
-end
-end
-
-# NOTE: handled by JdbcAdapter we override only to have save-point in logs :
-
-# @override
-def supports_savepoints?; true end
-
-# @override
-def create_savepoint(name = current_savepoint_name(true))
-log("SAVEPOINT #{name}", 'Savepoint') { super }
-end
-
-# @override
-def rollback_to_savepoint(name = current_savepoint_name(true))
-log("ROLLBACK TO SAVEPOINT #{name}", 'Savepoint') { super }
-end
-
-# @override
-def release_savepoint(name = current_savepoint_name(false))
-log("RELEASE SAVEPOINT #{name}", 'Savepoint') { super }
 end

 def supports_extensions?
 postgresql_version >= 90200
 end # NOTE: only since AR-4.0 but should not hurt on other versions

+# From AR 5.1 postgres_adapter.rb
+def default_index_type?(index) # :nodoc:
+index.using == :btree || super
+end
+
 def enable_extension(name)
 execute("CREATE EXTENSION IF NOT EXISTS \"#{name}\"")
 end
@@ -441,159 +312,76 @@ module ArJdbc
 execute "SET SESSION AUTHORIZATION #{user}"
 end

+# Came from postgres_adapter
+def get_advisory_lock(lock_id) # :nodoc:
+unless lock_id.is_a?(Integer) && lock_id.bit_length <= 63
+raise(ArgumentError, "Postgres requires advisory lock ids to be a signed 64 bit integer")
+end
+select_value("SELECT pg_try_advisory_lock(#{lock_id});")
+end
+
+# Came from postgres_adapter
+def release_advisory_lock(lock_id) # :nodoc:
+unless lock_id.is_a?(Integer) && lock_id.bit_length <= 63
+raise(ArgumentError, "Postgres requires advisory lock ids to be a signed 64 bit integer")
+end
+select_value("SELECT pg_advisory_unlock(#{lock_id})") == 't'.freeze
+end
+
 # Returns the configured supported identifier length supported by PostgreSQL,
 # or report the default of 63 on PostgreSQL 7.x.
 def table_alias_length
 @table_alias_length ||= (
 postgresql_version >= 80000 ?
-select_one('SHOW max_identifier_length')['max_identifier_length'].to_i :
+select_one('SHOW max_identifier_length', 'SCHEMA'.freeze)['max_identifier_length'].to_i :
 63
 )
 end
+alias index_name_length table_alias_length

-def
-
-
-
-
-
-
-
-
-
-
-if pk && sequence
-quoted_sequence = quote_column_name(sequence)
-
-select_value <<-end_sql, 'Reset Sequence'
-SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)
-end_sql
-end
-end
-
-# Find a table's primary key and sequence.
-def pk_and_sequence_for(table)
-# try looking for a seq with a dependency on the table's primary key :
-result = select(<<-end_sql, 'PK and Serial Sequence')[0]
-SELECT attr.attname, seq.relname
-FROM pg_class seq,
-pg_attribute attr,
-pg_depend dep,
-pg_constraint cons
-WHERE seq.oid = dep.objid
-AND seq.relkind = 'S'
-AND attr.attrelid = dep.refobjid
-AND attr.attnum = dep.refobjsubid
-AND attr.attrelid = cons.conrelid
-AND attr.attnum = cons.conkey[1]
-AND cons.contype = 'p'
-AND dep.refobjid = '#{quote_table_name(table)}'::regclass
-end_sql
-
-if result.nil? || result.empty?
-# if that fails, try parsing the primary key's default value :
-result = select(<<-end_sql, 'PK and Custom Sequence')[0]
-SELECT attr.attname,
-CASE
-WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL
-WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN
-substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),
-strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)
-ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)
-END as relname
-FROM pg_class t
-JOIN pg_attribute attr ON (t.oid = attrelid)
-JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
-JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
-WHERE t.oid = '#{quote_table_name(table)}'::regclass
-AND cons.contype = 'p'
-AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'
-end_sql
+def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
+val = super
+if !use_insert_returning? && pk
+unless sequence_name
+table_ref = extract_table_ref_from_insert_sql(sql)
+sequence_name = default_sequence_name(table_ref, pk)
+return val unless sequence_name
+end
+last_insert_id_result(sequence_name)
+else
+val
 end
-
-[ result['attname'], result['relname'] ]
-rescue
-nil
 end

-def
-
-
-FROM pg_attribute attr
-INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey)
-WHERE cons.contype = 'p' AND cons.conrelid = '#{quote_table_name(table)}'::regclass
-end_sql
-
-result && result['attname']
-# pk_and_sequence = pk_and_sequence_for(table)
-# pk_and_sequence && pk_and_sequence.first
+def explain(arel, binds = [])
+sql = "EXPLAIN #{to_sql(arel, binds)}"
+ActiveRecord::ConnectionAdapters::PostgreSQL::ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', binds))
 end

-
-
-
-
-
-end
-
-if pk && use_insert_returning? # && id_value.nil?
-select_value("#{to_sql(sql, binds)} RETURNING #{quote_column_name(pk)}")
+# Take an id from the result of an INSERT query.
+# @return [Integer, NilClass]
+def last_inserted_id(result)
+if result.is_a?(Hash) || result.is_a?(ActiveRecord::Result)
+result.first.first[1] # .first = { "id"=>1 } .first = [ "id", 1 ]
 else
-
-unless id_value
-table_ref ||= extract_table_ref_from_insert_sql(sql)
-# If neither PK nor sequence name is given, look them up.
-if table_ref && ! ( pk ||= primary_key(table_ref) ) && ! sequence_name
-pk, sequence_name = pk_and_sequence_for(table_ref)
-end
-# If a PK is given, fallback to default sequence name.
-# Don't fetch last insert id for a table without a PK.
-if pk && sequence_name ||= default_sequence_name(table_ref, pk)
-id_value = last_insert_id(table_ref, sequence_name)
-end
-end
-id_value
+result
 end
 end

-#
-
-unless pk
+def sql_for_insert(sql, pk, id_value, sequence_name, binds) # :nodoc:
+if pk.nil?
 # Extract the table from the insert sql. Yuck.
 table_ref = extract_table_ref_from_insert_sql(sql)
 pk = primary_key(table_ref) if table_ref
 end

+pk = nil if pk.is_a?(Array)
+
 if pk && use_insert_returning?
 sql = "#{sql} RETURNING #{quote_column_name(pk)}"
 end

-
-end
-
-# @override due RETURNING clause
-def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
-# NOTE: 3.2 does not pass the PK on #insert (passed only into #sql_for_insert) :
-# sql, binds = sql_for_insert(to_sql(arel, binds), pk, id_value, sequence_name, binds)
-# 3.2 :
-# value = exec_insert(sql, name, binds)
-# 4.x :
-# value = exec_insert(sql, name, binds, pk, sequence_name)
-if use_insert_returning? && ( pk || (sql.is_a?(String) && sql =~ /RETURNING "?\S+"?$/) )
-exec_query(sql, name, binds) # due RETURNING clause returns a result set
-else
-result = super
-if pk
-unless sequence_name
-table_ref = extract_table_ref_from_insert_sql(sql)
-sequence_name = default_sequence_name(table_ref, pk)
-return result unless sequence_name
-end
-last_insert_id_result(sequence_name)
-else
-result
-end
-end
+super
 end

 # @note Only for "better" AR 4.0 compatibility.
@@ -601,103 +389,29 @@ module ArJdbc
 def query(sql, name = nil)
 log(sql, name) do
 result = []
-@connection.execute_query_raw(sql,
-
+@connection.execute_query_raw(sql, []) do |*values|
+# We need to use #deep_dup here because it appears that
+# the java method is reusing an object in some cases
+# which makes all of the entries in the "result"
+# array end up with the same values as the last row
+result << values.deep_dup
 end
 result
 end
 end

-
-
-
-
-
-
-'SCHEMA')
-end
-
-# Returns true if schema exists.
-def schema_exists?(name)
-select_value("SELECT COUNT(*) FROM pg_namespace WHERE nspname = '#{name}'", 'SCHEMA').to_i > 0
-end
-
-# Returns the current schema name.
-def current_schema
-select_value('SELECT current_schema', 'SCHEMA')
-end
-
-# current database name
-def current_database
-select_value('SELECT current_database()', 'SCHEMA')
-end
-
-# Returns the current database encoding format.
-def encoding
-select_value(
-"SELECT pg_encoding_to_char(pg_database.encoding)" <<
-" FROM pg_database" <<
-" WHERE pg_database.datname LIKE '#{current_database}'",
-'SCHEMA')
-end
-
-# Returns the current database collation.
-def collation
-select_value(
-"SELECT pg_database.datcollate" <<
-" FROM pg_database" <<
-" WHERE pg_database.datname LIKE '#{current_database}'",
-'SCHEMA')
-end
-
-# Returns the current database ctype.
-def ctype
-select_value(
-"SELECT pg_database.datctype FROM pg_database WHERE pg_database.datname LIKE '#{current_database}'",
-'SCHEMA')
-end
-
-# Returns the active schema search path.
-def schema_search_path
-@schema_search_path ||= select_value('SHOW search_path', 'SCHEMA')
-end
-
-# Sets the schema search path to a string of comma-separated schema names.
-# Names beginning with $ have to be quoted (e.g. $user => '$user').
-# See: http://www.postgresql.org/docs/current/static/ddl-schemas.html
-#
-# This should be not be called manually but set in database.yml.
-def schema_search_path=(schema_csv)
-if schema_csv
-execute "SET search_path TO #{schema_csv}"
-@schema_search_path = schema_csv
-end
-end
-
-# Take an id from the result of an INSERT query.
-# @return [Integer, NilClass]
-def last_inserted_id(result)
-return nil if result.nil?
-return result if result.is_a? Integer
-# <ActiveRecord::Result @hash_rows=nil, @columns=["id"], @rows=[[3]]>
-# but it will work with [{ 'id' => 1 }] Hash wrapped results as well
-result.first.first[1] # .first = { "id"=>1 } .first = [ "id", 1 ]
-end
-
-def last_insert_id(table, sequence_name = nil)
-sequence_name = table if sequence_name.nil? # AR-4.0 1 argument
-last_insert_id_result(sequence_name)
+def reset!
+clear_cache!
+reset_transaction
+@connection.rollback # Have to deal with rollbacks differently than the AR adapter
+@connection.execute 'DISCARD ALL'
+configure_connection
 end

 def last_insert_id_result(sequence_name)
 select_value("SELECT currval('#{sequence_name}')", 'SQL')
 end

-def recreate_database(name, options = {})
-drop_database(name)
-create_database(name, options)
-end
-
 # Create a new PostgreSQL database. Options include <tt>:owner</tt>, <tt>:template</tt>,
 # <tt>:encoding</tt>, <tt>:collation</tt>, <tt>:ctype</tt>,
 # <tt>:tablespace</tt>, and <tt>:connection_limit</tt> (note that MySQL uses
@@ -733,56 +447,10 @@ module ArJdbc
 execute "CREATE DATABASE #{quote_table_name(name)}#{option_string}"
 end

-def drop_database(name)
-execute "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
-end
-
-# Creates a schema for the given schema name.
-def create_schema(schema_name, pg_username = nil)
-if pg_username.nil? # AR 4.0 compatibility - accepts only single argument
-execute "CREATE SCHEMA #{schema_name}"
-else
-execute("CREATE SCHEMA \"#{schema_name}\" AUTHORIZATION \"#{pg_username}\"")
-end
-end
-
-# Drops the schema for the given schema name.
-def drop_schema schema_name
-execute "DROP SCHEMA #{schema_name} CASCADE"
-end
-
 def all_schemas
 select('SELECT nspname FROM pg_namespace').map { |row| row["nspname"] }
 end

-# @deprecated no longer used - handled with (AR built-in) Rake tasks
-def structure_dump
-database = @config[:database]
-if database.nil?
-if @config[:url] =~ /\/([^\/]*)$/
-database = $1
-else
-raise "Could not figure out what database this url is for #{@config["url"]}"
-end
-end
-
-ENV['PGHOST'] = @config[:host] if @config[:host]
-ENV['PGPORT'] = @config[:port].to_s if @config[:port]
-ENV['PGPASSWORD'] = @config[:password].to_s if @config[:password]
-search_path = "--schema=#{@config[:schema_search_path]}" if @config[:schema_search_path]
-
-@connection.connection.close
-begin
-definition = `pg_dump -i -U "#{@config[:username]}" -s -x -O #{search_path} #{database}`
-raise "Error dumping database" if $?.exitstatus == 1
-
-# need to patch away any references to SQL_ASCII as it breaks the JDBC driver
-definition.gsub(/SQL_ASCII/, 'UNICODE')
-ensure
-reconnect!
-end
-end
-
 # Returns the current client message level.
 def client_min_messages
 return nil if redshift? # not supported on Redshift
@@ -797,42 +465,6 @@ module ArJdbc
 execute("SET client_min_messages TO '#{level}'", 'SCHEMA')
 end

-# Gets the maximum number columns postgres has, default 32
-def multi_column_index_limit
-defined?(@multi_column_index_limit) && @multi_column_index_limit || 32
-end
-
-# Sets the maximum number columns postgres has, default 32
-def multi_column_index_limit=(limit)
-@multi_column_index_limit = limit
-end
-
-# @override
-def distinct(columns, orders)
-"DISTINCT #{columns_for_distinct(columns, orders)}"
-end
-
-# PostgreSQL requires the ORDER BY columns in the select list for distinct
-# queries, and requires that the ORDER BY include the distinct column.
-# @override Since AR 4.0 (on 4.1 {#distinct} is gone and won't be called).
-def columns_for_distinct(columns, orders)
-if orders.is_a?(String)
-orders = orders.split(','); orders.each(&:strip!)
-end
-
-order_columns = orders.reject(&:blank?).map! do |column|
-column = column.is_a?(String) ? column.dup : column.to_sql # AREL node
-column.gsub!(/\s+(?:ASC|DESC)\s*/i, '') # remove any ASC/DESC modifiers
-column.gsub!(/\s*NULLS\s+(?:FIRST|LAST)?\s*/i, '')
-column
-end
-order_columns.reject!(&:empty?)
-i = -1; order_columns.map! { |column| "#{column} AS alias_#{i += 1}" }
-
-columns = [ columns ]; columns.flatten!
-columns.push( *order_columns ).join(', ')
-end
-
 # ORDER BY clause for the passed order option.
 #
 # PostgreSQL does not allow arbitrary ordering when using DISTINCT ON,
@@ -848,94 +480,6 @@ module ArJdbc
 sql.replace "SELECT * FROM (#{sql}) AS id_list ORDER BY #{order}"
 end

-# @return [String]
-# @override
-def quote(value, column = nil)
-return super unless column && column.type
-return value if sql_literal?(value)
-
-case value
-when Float
-if value.infinite? && ( column.type == :datetime || column.type == :timestamp )
-"'#{value.to_s.downcase}'"
-elsif value.infinite? || value.nan?
-"'#{value.to_s}'"
-else super
-end
-when Numeric
-if column.respond_to?(:sql_type) && column.sql_type == 'money'
-"'#{value}'"
-elsif column.type == :string || column.type == :text
-"'#{value}'"
-else super
-end
-when String
-return "E'#{escape_bytea(value)}'::bytea" if column.type == :binary
-return "xml '#{quote_string(value)}'" if column.type == :xml
-sql_type = column.respond_to?(:sql_type) && column.sql_type
-sql_type && sql_type[0, 3] == 'bit' ? quote_bit(value) : super
-when Array
-if AR40 && column.array? # will be always falsy in AR < 4.0
-"'#{jdbc_column_class.array_to_string(value, column, self).gsub(/'/, "''")}'"
-elsif column.type == :json # only in AR-4.0
-super(jdbc_column_class.json_to_string(value), column)
-elsif column.type == :jsonb # only in AR-4.0
-super(jdbc_column_class.json_to_string(value), column)
-elsif column.type == :point # only in AR-4.0
-super(jdbc_column_class.point_to_string(value), column)
-else super
-end
-when Hash
-if column.type == :hstore # only in AR-4.0
-super(jdbc_column_class.hstore_to_string(value), column)
-elsif column.type == :json # only in AR-4.0
-super(jdbc_column_class.json_to_string(value), column)
-elsif column.type == :jsonb # only in AR-4.0
-super(jdbc_column_class.json_to_string(value), column)
-else super
-end
-when Range
-sql_type = column.respond_to?(:sql_type) && column.sql_type
-if sql_type && sql_type[-5, 5] == 'range' && AR40
-escaped = quote_string(jdbc_column_class.range_to_string(value))
-"'#{escaped}'::#{sql_type}"
-else super
-end
-when IPAddr
-if column.type == :inet || column.type == :cidr # only in AR-4.0
-super(jdbc_column_class.cidr_to_string(value), column)
-else super
-end
-else
-super
-end
-end unless AR42
-
-# @private
-def _quote(value)
-case value
-when Type::Binary::Data
-"E'#{escape_bytea(value.to_s)}'"
-when OID::Xml::Data
-"xml '#{quote_string(value.to_s)}'"
-when OID::Bit::Data
-if value.binary?
-"B'#{value}'"
-elsif value.hex?
-"X'#{value}'"
-end
-when Float
-if value.infinite? || value.nan?
-"'#{value}'"
-else
-super
-end
-else
-super
-end
-end if AR42
-private :_quote if AR42
-
 # Quotes a string, escaping any ' (single quote) and \ (backslash) chars.
 # @return [String]
 # @override
@@ -947,24 +491,6 @@ module ArJdbc
 quoted
 end

-# @return [String]
-def quote_bit(value)
-case value
-# NOTE: as reported with #60 this is not quite "right" :
-# "0103" will be treated as hexadecimal string
-# "0102" will be treated as hexadecimal string
-# "0101" will be treated as binary string
-# "0100" will be treated as binary string
-# ... but is kept due Rails compatibility
-when /\A[01]*\Z/ then "B'#{value}'" # Bit-string notation
-when /\A[0-9A-F]*\Z/i then "X'#{value}'" # Hexadecimal notation
-end
-end
-
-def quote_bit(value)
-"B'#{value}'"
-end if AR40
-
 def escape_bytea(string)
 return unless string
 if supports_hex_escaped_bytea?
@@ -988,285 +514,19 @@ module ArJdbc
 end
 end

-# @override
-def quote_table_name_for_assignment(table, attr)
-quote_column_name(attr)
-end if AR40
-
 # @override
 def quote_column_name(name)
 %("#{name.to_s.gsub("\"", "\"\"")}")
 end
-
-# @private
-def quote_default_value(value, column)
-# Do not quote function default values for UUID columns
-if column.type == :uuid && value =~ /\(\)/
-value
-else
-quote(value, column)
-end
-end
-
-# Quote date/time values for use in SQL input.
-# Includes microseconds if the value is a Time responding to `usec`.
-# @override
-def quoted_date(value)
-result = super
-if value.acts_like?(:time) && value.respond_to?(:usec) && !AR50
-result = "#{result}.#{sprintf("%06d", value.usec)}"
-end
-result = "#{result.sub(/^-/, '')} BC" if value.year < 0
-result
-end if ::ActiveRecord::VERSION::MAJOR >= 3
-
-# @override
-def supports_disable_referential_integrity?
-true
-end
-
-def disable_referential_integrity
-if supports_disable_referential_integrity?
-begin
-execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER ALL" }.join(";"))
-rescue
-execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER USER" }.join(";"))
-end
-end
-yield
-ensure
-if supports_disable_referential_integrity?
-begin
-execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER ALL" }.join(";"))
-rescue
-execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER USER" }.join(";"))
-end
-end
-end
-
-def rename_table(table_name, new_name)
-execute "ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}"
-pk, seq = pk_and_sequence_for(new_name)
-if seq == "#{table_name}_#{pk}_seq"
-new_seq = "#{new_name}_#{pk}_seq"
-idx = "#{table_name}_pkey"
-new_idx = "#{new_name}_pkey"
-execute "ALTER TABLE #{quote_table_name(seq)} RENAME TO #{quote_table_name(new_seq)}"
-execute "ALTER INDEX #{quote_table_name(idx)} RENAME TO #{quote_table_name(new_idx)}"
-end
-rename_table_indexes(table_name, new_name) if respond_to?(:rename_table_indexes) # AR-4.0 SchemaStatements
-end
-
-# Adds a new column to the named table.
-# See TableDefinition#column for details of the options you can use.
-def add_column(table_name, column_name, type, options = {})
-default = options[:default]
-notnull = options[:null] == false
-
-sql_type = type_to_sql(type, options[:limit], options[:precision], options[:scale])
-sql_type << "[]" if options[:array]
-
-# Add the column.
-execute("ALTER TABLE #{quote_table_name(table_name)} ADD COLUMN #{quote_column_name(column_name)} #{sql_type}")
-
-change_column_default(table_name, column_name, default) if options_include_default?(options)
-change_column_null(table_name, column_name, false, default) if notnull
-end if ::ActiveRecord::VERSION::MAJOR < 4
-
-# @private documented above
-def add_column(table_name, column_name, type, options = {}); super end if AR42
-
-# Changes the column of a table.
-def change_column(table_name, column_name, type, options = {})
-quoted_table_name = quote_table_name(table_name)
-quoted_column_name = quote_table_name(column_name)
-
-sql_type = type_to_sql(type, options[:limit], options[:precision], options[:scale])
-sql_type << "[]" if options[:array]
-
-sql = "ALTER TABLE #{quoted_table_name} ALTER COLUMN #{quoted_column_name} TYPE #{sql_type}"
-sql << " USING #{options[:using]}" if options[:using]
-if options[:cast_as]
-sql << " USING CAST(#{quoted_column_name} AS #{type_to_sql(options[:cast_as], options[:limit], options[:precision], options[:scale])})"
-end
-begin
-execute sql
-rescue ActiveRecord::StatementInvalid => e
-raise e if postgresql_version > 80000
-change_column_pg7(table_name, column_name, type, options)
-end
-
-change_column_default(table_name, column_name, options[:default]) if options_include_default?(options)
-change_column_null(table_name, column_name, options[:null], options[:default]) if options.key?(:null)
-end # unless const_defined? :SchemaCreation
-
-def change_column_pg7(table_name, column_name, type, options)
-quoted_table_name = quote_table_name(table_name)
-# This is PostgreSQL 7.x, so we have to use a more arcane way of doing it.
-begin
-begin_db_transaction
-tmp_column_name = "#{column_name}_ar_tmp"
-add_column(table_name, tmp_column_name, type, options)
-execute "UPDATE #{quoted_table_name} SET #{quote_column_name(tmp_column_name)} = CAST(#{quote_column_name(column_name)} AS #{sql_type})"
-remove_column(table_name, column_name)
-rename_column(table_name, tmp_column_name, column_name)
-commit_db_transaction
-rescue
-rollback_db_transaction
-end
-end
-private :change_column_pg7
-
-# Changes the default value of a table column.
-def change_column_default(table_name, column_name, default)
-if column = column_for(table_name, column_name) # (backwards) compatible with AR 3.x - 4.x
-execute "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} SET DEFAULT #{quote_default_value(default, column)}"
-else
-execute "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} SET DEFAULT #{quote(default)}"
-end
-end unless AR42 # unless const_defined? :SchemaCreation
-
-# @private documented above
-def change_column_default(table_name, column_name, default)
-return unless column = column_for(table_name, column_name)
-
-alter_column_query = "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} %s"
-if default.nil?
-# <tt>DEFAULT NULL</tt> results in the same behavior as <tt>DROP DEFAULT</tt>. However, PostgreSQL will
-# cast the default to the columns type, which leaves us with a default like "default NULL::character varying".
-execute alter_column_query % "DROP DEFAULT"
-else
-execute alter_column_query % "SET DEFAULT #{quote_default_value(default, column)}"
-end
-end if AR42
-
-# @private
-def change_column_null(table_name, column_name, null, default = nil)
-unless null || default.nil?
-if column = column_for(table_name, column_name) # (backwards) compatible with AR 3.x - 4.x
-execute "UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote_default_value(default, column)} WHERE #{quote_column_name(column_name)} IS NULL"
-else
-execute "UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL"
-end
-end
-execute("ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} #{null ? 'DROP' : 'SET'} NOT NULL")
-end unless AR42 # unless const_defined? :SchemaCreation
-
-# @private
-def change_column_null(table_name, column_name, null, default = nil)
-unless null || default.nil?
-column = column_for(table_name, column_name)
-execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote_default_value(default, column)} WHERE #{quote_column_name(column_name)} IS NULL") if column
-end
-execute("ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} #{null ? 'DROP' : 'SET'} NOT NULL")
-end if AR42
-
-def rename_column(table_name, column_name, new_column_name)
-execute "ALTER TABLE #{quote_table_name(table_name)} RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}"
-rename_column_indexes(table_name, column_name, new_column_name) if respond_to?(:rename_column_indexes) # AR-4.0 SchemaStatements
-end # unless const_defined? :SchemaCreation
-
-def add_index(table_name, column_name, options = {})
-index_name, index_type, index_columns, index_options, index_algorithm, index_using = add_index_options(table_name, column_name, options)
-execute "CREATE #{index_type} INDEX #{index_algorithm} #{quote_column_name(index_name)} ON #{quote_table_name(table_name)} #{index_using} (#{index_columns})#{index_options}"
-end if AR40
+alias_method :quote_schema_name, :quote_column_name

 def remove_index!(table_name, index_name)
 execute "DROP INDEX #{quote_table_name(index_name)}"
 end

-def rename_index(table_name, old_name, new_name)
-validate_index_length!(table_name, new_name) if respond_to?(:validate_index_length!)
-
-execute "ALTER INDEX #{quote_column_name(old_name)} RENAME TO #{quote_table_name(new_name)}"
-end
-
 # @override
 def supports_foreign_keys?; true end

-def foreign_keys(table_name)
-fk_info = select_all "" <<
-"SELECT t2.oid::regclass::text AS to_table, a1.attname AS column, a2.attname AS primary_key, c.conname AS name, c.confupdtype AS on_update, c.confdeltype AS on_delete " <<
-"FROM pg_constraint c " <<
-"JOIN pg_class t1 ON c.conrelid = t1.oid " <<
-"JOIN pg_class t2 ON c.confrelid = t2.oid " <<
-"JOIN pg_attribute a1 ON a1.attnum = c.conkey[1] AND a1.attrelid = t1.oid " <<
-"JOIN pg_attribute a2 ON a2.attnum = c.confkey[1] AND a2.attrelid = t2.oid " <<
-"JOIN pg_namespace t3 ON c.connamespace = t3.oid " <<
-"WHERE c.contype = 'f' " <<
-" AND t1.relname = #{quote(table_name)} " <<
-" AND t3.nspname = ANY (current_schemas(false)) " <<
-"ORDER BY c.conname "
-
-fk_info.map! do |row|
-options = {
-:column => row['column'], :name => row['name'], :primary_key => row['primary_key']
-}
-options[:on_delete] = extract_foreign_key_action(row['on_delete'])
-options[:on_update] = extract_foreign_key_action(row['on_update'])
-
-ForeignKeyDefinition.new(table_name, row['to_table'], options)
-end
-end if defined? ForeignKeyDefinition
-
-# @private
-def extract_foreign_key_action(specifier)
-case specifier
-when 'c'; :cascade
-when 'n'; :nullify
-when 'r'; :restrict
-end
-end
-private :extract_foreign_key_action
-
-def index_name_length
-63
-end
-
-# Returns the list of all column definitions for a table.
-def columns(table_name, name = nil)
-column = jdbc_column_class
-column_definitions(table_name).map! do |row|
-# |name, type, default, notnull, oid, fmod|
-name = row[0]; type = row[1]; default = row[2]
-notnull = row[3]; oid = row[4]; fmod = row[5]
-# oid = OID::TYPE_MAP.fetch(oid.to_i, fmod.to_i) { OID::Identity.new }
-notnull = notnull == 't' if notnull.is_a?(String) # JDBC gets true/false
-# for ID columns we get a bit of non-sense default :
-# e.g. "nextval('mixed_cases_id_seq'::regclass"
-if default =~ /^nextval\(.*?\:\:regclass\)$/
-default = nil
-elsif default =~ /^\(([-+]?[\d\.]+)\)$/ # e.g. "(-1)" for a negative default
-default = $1
-end
-
-column.new(name, default, oid, type, ! notnull, fmod, self)
-end
-end
-
-# @private documented above
-def columns(table_name)
-column = jdbc_column_class
-# Limit, precision, and scale are all handled by the superclass.
-column_definitions(table_name).map! do |row|
-# |name, type, default, notnull, oid, fmod|
-name = row[0]; type = row[1]; default = row[2]
-notnull = row[3]; oid = row[4]; fmod = row[5]
-notnull = notnull == 't' if notnull.is_a?(String) # JDBC gets true/false
-
-oid_type = get_oid_type(oid.to_i, fmod.to_i, name, type)
-default_value = extract_value_from_default(oid, default)
-default_function = extract_default_function(default_value, default)
-
-column.new(name, default_value, oid_type, type, ! notnull, default_function, oid, self)
-end
-end if AR42
-
-# @private only for API compatibility
-def new_column(name, default, cast_type, sql_type = nil, null = true, default_function = nil)
-jdbc_column_class.new(name, default, cast_type, sql_type, null, default_function)
-end if AR42
-
 # @private
 def column_for(table_name, column_name)
 column_name = column_name.to_s
@@ -1285,139 +545,90 @@ module ArJdbc
|
|
1285
545
|
# - format_type includes the column size constraint, e.g. varchar(50)
|
1286
546
|
# - ::regclass is a function that gives the id for a table name
|
1287
547
|
def column_definitions(table_name)
|
1288
|
-
select_rows(<<-end_sql, 'SCHEMA')
|
548
|
+
rows = select_rows(<<-end_sql, 'SCHEMA')
|
1289
549
|
SELECT a.attname, format_type(a.atttypid, a.atttypmod),
|
1290
|
-
pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod
|
1291
|
-
|
1292
|
-
|
550
|
+
pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod,
|
551
|
+
(SELECT c.collname FROM pg_collation c, pg_type t
|
552
|
+
WHERE c.oid = a.attcollation AND t.oid = a.atttypid
|
553
|
+
AND a.attcollation <> t.typcollation),
|
554
|
+
col_description(a.attrelid, a.attnum) AS comment
|
555
|
+
FROM pg_attribute a
|
556
|
+
LEFT JOIN pg_attrdef d ON a.attrelid = d.adrelid AND a.attnum = d.adnum
|
1293
557
|
WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass
|
1294
558
|
AND a.attnum > 0 AND NOT a.attisdropped
|
1295
559
|
ORDER BY a.attnum
|
1296
560
|
end_sql
|
1297
|
-
end
|
1298
|
-
private :column_definitions
|
1299
|
-
|
1300
|
-
# @private
|
1301
|
-
TABLES_SQL = 'SELECT tablename FROM pg_tables WHERE schemaname = ANY (current_schemas(false))'
|
1302
|
-
private_constant :TABLES_SQL rescue nil
|
1303
|
-
|
1304
|
-
# @override
|
1305
|
-
def tables(name = nil)
|
1306
|
-
select_values(TABLES_SQL, 'SCHEMA')
|
1307
|
-
end
|
1308
561
|
|
1309
|
-
|
1310
|
-
|
1311
|
-
|
1312
|
-
if AR42 # -- (r)elation/table, (v)iew, (m)aterialized view
|
1313
|
-
TABLE_EXISTS_SQL_PREFIX << " WHERE c.relkind IN ('r','v','m')"
|
1314
|
-
else
|
1315
|
-
TABLE_EXISTS_SQL_PREFIX << " WHERE c.relkind IN ('r','v')"
|
1316
|
-
end
|
1317
|
-
TABLE_EXISTS_SQL_PREFIX << " AND c.relname = ?"
|
1318
|
-
private_constant :TABLE_EXISTS_SQL_PREFIX rescue nil
|
1319
|
-
|
1320
|
-
# Returns true if table exists.
|
1321
|
-
# If the schema is not specified as part of +name+ then it will only find tables within
|
1322
|
-
-    # the current schema search path (regardless of permissions to access tables in other schemas)
-    def table_exists?(name)
-      schema, table = extract_schema_and_table(name.to_s)
-      return false unless table
-
-      binds = [[nil, table]]
-      binds << [nil, schema] if schema
-
-      sql = "#{TABLE_EXISTS_SQL_PREFIX} AND n.nspname = #{schema ? "?" : 'ANY (current_schemas(false))'}"
-
-      log(sql, 'SCHEMA', binds) do
-        @connection.execute_query_raw(sql, binds).first['table_count'] > 0
+      # Force the notnull attribute to a boolean
+      rows.each do |row|
+        row[3] = row[3] == 't' if row[3].is_a?(String)
       end
     end
-
-
-    # @private
-    DATA_SOURCES_SQL = 'SELECT c.relname FROM pg_class c'
-    DATA_SOURCES_SQL << ' LEFT JOIN pg_namespace n ON n.oid = c.relnamespace'
-    DATA_SOURCES_SQL << " WHERE c.relkind IN ('r', 'v','m')" # -- (r)elation/table, (v)iew, (m)aterialized view
-    DATA_SOURCES_SQL << ' AND n.nspname = ANY (current_schemas(false))'
-    private_constant :DATA_SOURCES_SQL rescue nil
-
-    # @override
-    def data_sources
-      select_values(DATA_SOURCES_SQL, 'SCHEMA')
-    end
-
-    def drop_table(table_name, options = {})
-      execute "DROP TABLE #{quote_table_name(table_name)}#{' CASCADE' if options[:force] == :cascade}"
-    end
+    private :column_definitions
 
     def truncate(table_name, name = nil)
       execute "TRUNCATE TABLE #{quote_table_name(table_name)}", name
     end
 
-    def index_name_exists?(table_name, index_name, default)
-      exec_query(<<-SQL, 'SCHEMA').rows.first[0].to_i > 0
-        SELECT COUNT(*)
-        FROM pg_class t
-        INNER JOIN pg_index d ON t.oid = d.indrelid
-        INNER JOIN pg_class i ON d.indexrelid = i.oid
-        WHERE i.relkind = 'i'
-          AND i.relname = '#{index_name}'
-          AND t.relname = '#{table_name}'
-          AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )
-      SQL
-    end if AR42
-
     # Returns an array of indexes for the given table.
     def indexes(table_name, name = nil)
-
-
-
-
-
-
-
-
-
-
-
+      if name
+        ActiveSupport::Deprecation.warn(<<-MSG.squish)
+          Passing name to #indexes is deprecated without replacement.
+        MSG
+      end
+
+      # FIXME: AR version => table = Utils.extract_schema_qualified_name(table_name.to_s)
+      schema, table = extract_schema_and_table(table_name.to_s)
+
+      result = query(<<-SQL, 'SCHEMA')
+        SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid,
+                        pg_catalog.obj_description(i.oid, 'pg_class') AS comment,
+        (SELECT COUNT(*) FROM pg_opclass o
+           JOIN (SELECT unnest(string_to_array(d.indclass::text, ' '))::int oid) c
+             ON o.oid = c.oid WHERE o.opcdefault = 'f')
+        FROM pg_class t
+        INNER JOIN pg_index d ON t.oid = d.indrelid
+        INNER JOIN pg_class i ON d.indexrelid = i.oid
+        LEFT JOIN pg_namespace n ON n.oid = i.relnamespace
+        WHERE i.relkind = 'i'
+          AND d.indisprimary = 'f'
+          AND t.relname = '#{table}'
+          AND n.nspname = #{schema ? "'#{schema}'" : 'ANY (current_schemas(false))'}
+        ORDER BY i.relname
       SQL
 
-      result.map
+      result.map do |row|
         index_name = row[0]
+        # FIXME: These values [1,2] are returned in a different format than AR expects, maybe we could update it on the Java side to be more accurate
         unique = row[1].is_a?(String) ? row[1] == 't' : row[1] # JDBC gets us a boolean
         indkey = row[2].is_a?(Java::OrgPostgresqlUtil::PGobject) ? row[2].value : row[2]
-        indkey = indkey.split(" ")
+        indkey = indkey.split(" ").map(&:to_i)
         inddef = row[3]
         oid = row[4]
+        comment = row[5]
+        opclass = row[6]
 
-
-          SELECT a.attnum, a.attname
-          FROM pg_attribute a
-          WHERE a.attrelid = #{oid}
-          AND a.attnum IN (#{indkey.join(",")})
-        SQL
+        using, expressions, where = inddef.scan(/ USING (\w+?) \((.+?)\)(?: WHERE (.+))?\z/).flatten
 
-
-
+        if indkey.include?(0) || opclass > 0
+          columns = expressions
+        else
+          columns = Hash[query(<<-SQL.strip_heredoc, "SCHEMA")].values_at(*indkey).compact
+            SELECT a.attnum, a.attname
+            FROM pg_attribute a
+            WHERE a.attrelid = #{oid}
+            AND a.attnum IN (#{indkey.join(",")})
+          SQL
 
-        unless column_names.empty?
          # add info on sort order for columns (only desc order is explicitly specified, asc is the default)
-
-
-
-          if ::ActiveRecord::VERSION::MAJOR > 3 # AR4 supports `where` and `using` index options
-            where = inddef.scan(/WHERE (.+)$/).flatten[0]
-            using = inddef.scan(/USING (.+?) /).flatten[0].to_sym
-
-            IndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, nil, using)
-          else
-            new_index_definition(table_name, index_name, unique, column_names, [], orders)
-          end
+          orders = Hash[
+            expressions.scan(/(\w+) DESC/).flatten.map { |order_column| [order_column, :desc] }
+          ]
         end
-
-
-
+
+        IndexDefinition.new(table_name, index_name, unique, columns, [], orders, where, nil, using.to_sym, comment.presence)
+      end.compact
     end
 
     # @private
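For context on the rewritten indexes method above: it still returns ordinary ActiveRecord::ConnectionAdapters::IndexDefinition objects, now carrying the Rails 5 extras (index comment, using, partial-index where, and raw expressions for expression indexes). A minimal, hypothetical sketch of inspecting the result from a JRuby console; the users table and its indexes are invented for illustration:

    # Hypothetical illustration -- table and index contents are not from this gem.
    conn = ActiveRecord::Base.connection

    conn.indexes('users').each do |index|
      puts index.name     # e.g. "index_users_on_email"
      puts index.unique   # true / false
      puts index.columns  # column names, or the raw expression for an expression index
      puts index.where    # partial-index predicate, if any
      puts index.using    # :btree, :gin, ...
      puts index.comment  # COMMENT ON INDEX value, if set
    end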
@@ -1428,16 +639,48 @@ module ArJdbc
        when 'average' then 'avg'
        else operation.downcase
        end
-    end
+    end
 
     private
 
+    # Pulled from ActiveRecord's Postgres adapter and modified to use execute
+    def can_perform_case_insensitive_comparison_for?(column)
+      @case_insensitive_cache ||= {}
+      @case_insensitive_cache[column.sql_type] ||= begin
+        sql = <<-end_sql
+          SELECT exists(
+            SELECT * FROM pg_proc
+            WHERE proname = 'lower'
+              AND proargtypes = ARRAY[#{quote column.sql_type}::regtype]::oidvector
+          ) OR exists(
+            SELECT * FROM pg_proc
+            INNER JOIN pg_cast
+              ON ARRAY[casttarget]::oidvector = proargtypes
+            WHERE proname = 'lower'
+              AND castsource = #{quote column.sql_type}::regtype
+          )
+        end_sql
+        select_rows(sql, 'SCHEMA').first.first == 't'
+      end
+    end
+
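The can_perform_case_insensitive_comparison_for? probe added above feeds ActiveRecord's case-insensitive comparison: when PostgreSQL has no lower(...) function or cast for the column's type, the comparison falls back to a plain equality check instead of wrapping the value in LOWER(...). A hedged illustration of where this matters; the model and column are hypothetical:

    # Hypothetical model: the validation below exercises the adapter's
    # case-insensitive comparison path.
    class Account < ActiveRecord::Base
      validates :email, uniqueness: { case_sensitive: false }
    end

    # For a text/varchar column the uniqueness check can use LOWER("accounts"."email");
    # for a type without a lower() overload the probe returns false and a plain
    # comparison is used instead.
    Account.create!(email: 'User@example.com')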
     def translate_exception(exception, message)
+      # TODO: Can we base these on an error code of some kind?
       case exception.message
       when /duplicate key value violates unique constraint/
-        ::ActiveRecord::RecordNotUnique.new(message, exception)
+        ::ActiveRecord::RecordNotUnique.new(message)
+      when /violates not-null constraint/
+        ::ActiveRecord::NotNullViolation.new(message)
       when /violates foreign key constraint/
-        ::ActiveRecord::InvalidForeignKey.new(message, exception)
+        ::ActiveRecord::InvalidForeignKey.new(message)
+      when /value too long/
+        ::ActiveRecord::ValueTooLong.new(message)
+      when /out of range/
+        ::ActiveRecord::RangeError.new(message)
+      when /could not serialize/
+        ::ActiveRecord::SerializationFailure.new(message)
+      when /deadlock detected/
+        ::ActiveRecord::Deadlocked.new(message)
       else
         super
       end
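These message-based translations mean JDBC-level failures surface as the usual Rails 5 exception classes, so application code can rescue them without knowing a JDBC driver sits underneath. A small hypothetical usage sketch, relying only on the exception classes mapped above:

    # Hypothetical example; model and email value are placeholders.
    begin
      User.create!(email: 'taken@example.com')
    rescue ActiveRecord::RecordNotUnique
      # "duplicate key value violates unique constraint"
      # handle the duplicate (e.g. re-render the form)
    rescue ActiveRecord::Deadlocked, ActiveRecord::SerializationFailure
      # transient concurrency failures -- usually safe to retry
      retry
    end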
@@ -1477,30 +720,37 @@ require 'arjdbc/util/quoted_cache'
 
 module ActiveRecord::ConnectionAdapters
 
-  remove_const(:PostgreSQLColumn) if const_defined?(:PostgreSQLColumn)
-
-  class PostgreSQLColumn < JdbcColumn
-    include ::ArJdbc::PostgreSQL::Column
-  end
-
   # NOTE: seems needed on 4.x due loading of '.../postgresql/oid' which
   # assumes: class PostgreSQLAdapter < AbstractAdapter
   remove_const(:PostgreSQLAdapter) if const_defined?(:PostgreSQLAdapter)
 
-  class PostgreSQLAdapter < JdbcAdapter
-    include ::ArJdbc::PostgreSQL
-    include ::ArJdbc::PostgreSQL::ExplainSupport
+  class PostgreSQLAdapter < AbstractAdapter
 
-
-
+    # Try to use as much of the built in postgres logic as possible
+    # maybe someday we can extend the actual adapter
+    include ActiveRecord::ConnectionAdapters::PostgreSQL::ColumnDumper
+    include ActiveRecord::ConnectionAdapters::PostgreSQL::ReferentialIntegrity
+    include ActiveRecord::ConnectionAdapters::PostgreSQL::SchemaStatements
+    include ActiveRecord::ConnectionAdapters::PostgreSQL::Quoting
 
-
+    include ArJdbc::Abstract::Core
+    include ArJdbc::Abstract::ConnectionManagement
+    include ArJdbc::Abstract::DatabaseStatements
+    include ArJdbc::Abstract::StatementCache
+    include ArJdbc::Abstract::TransactionSupport
+    include ArJdbc::PostgreSQL
 
-
+    require 'arjdbc/postgresql/oid_types'
+    include ::ArJdbc::PostgreSQL::OIDTypes
+
+    include ::ArJdbc::PostgreSQL::ColumnHelpers
 
     include ::ArJdbc::Util::QuotedCache
 
-
+    # AR expects OID to be available on the adapter
+    OID = ActiveRecord::ConnectionAdapters::PostgreSQL::OID
+
+    def initialize(connection, logger = nil, config = {})
       # @local_tz is initialized as nil to avoid warnings when connect tries to use it
       @local_tz = nil
 
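Since the adapter now subclasses AbstractAdapter directly and reuses ActiveRecord's own PostgreSQL modules, it is wired up the same way as on MRI; a sketch of a programmatic connection on JRuby, where all values are placeholders:

    # Placeholder settings; on JRuby, adapter: 'postgresql' resolves to this
    # JDBC-backed PostgreSQLAdapter via activerecord-jdbc-adapter.
    ActiveRecord::Base.establish_connection(
      adapter:  'postgresql',
      host:     'localhost',
      database: 'myapp_development',
      username: 'postgres',
      prepared_statements: true
    )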
@@ -1508,7 +758,7 @@ module ActiveRecord::ConnectionAdapters
 
       @table_alias_length = nil
 
-      initialize_type_map(@type_map = Type::HashLookupTypeMap.new)
+      initialize_type_map(@type_map = Type::HashLookupTypeMap.new)
 
       @use_insert_returning = @config.key?(:insert_returning) ?
         self.class.type_cast_config_to_boolean(@config[:insert_returning]) : nil
@@ -1518,34 +768,49 @@ module ActiveRecord::ConnectionAdapters
       Arel::Visitors::PostgreSQL.new(self)
     end
 
-
-      require 'active_record/connection_adapters/postgresql/schema_definitions'
-    else
-      require 'arjdbc/postgresql/base/schema_definitions'
-    end
-
-    ColumnDefinition = ActiveRecord::ConnectionAdapters::PostgreSQL::ColumnDefinition
+    require 'active_record/connection_adapters/postgresql/schema_definitions'
 
     ColumnMethods = ActiveRecord::ConnectionAdapters::PostgreSQL::ColumnMethods
     TableDefinition = ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition
+    Table = ActiveRecord::ConnectionAdapters::PostgreSQL::Table
 
-    def
-
+    def create_table_definition(*args) # :nodoc:
+      TableDefinition.new(*args)
     end
 
-
+    def exec_query(sql, name = nil, binds = [], prepare: false)
+      super
+    rescue ActiveRecord::StatementInvalid => e
+      raise unless e.cause.message.include?('cached plan must not change result type'.freeze)
+
+      if open_transactions > 0
+        # In a transaction, have to fail it - See AR code for details
+        raise ActiveRecord::PreparedStatementCacheExpired.new(e.cause.message)
+      else
+        # Not in a transaction, clear the prepared statement and try again
+        delete_cached_statement(sql)
+        retry
+      end
+    end
+
+    def schema_creation # :nodoc:
+      PostgreSQL::SchemaCreation.new self
+    end
 
     def update_table_definition(table_name, base)
       Table.new(table_name, base)
-    end
+    end
 
     def jdbc_connection_class(spec)
       ::ArJdbc::PostgreSQL.jdbc_connection_class
     end
 
-
-
-
+    private
+
+    # Prepared statements aren't schema aware so we need to make sure we
+    # store different PreparedStatement objects for different schemas
+    def cached_statement_key(sql)
+      "#{schema_search_path}-#{sql}"
     end
 
   end
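The exec_query rescue and the schema-qualified cached_statement_key shown above work together: a prepared statement is only reused under the search path it was created for, and a "cached plan must not change result type" error outside a transaction simply evicts the cached statement and retries. A hedged sketch of the multi-schema scenario the key guards against; schema and table names are invented:

    # Hypothetical multi-tenant setup: identical SQL text, different search paths.
    conn = ActiveRecord::Base.connection

    conn.schema_search_path = 'tenant_a'
    conn.exec_query('SELECT * FROM widgets LIMIT 1')  # cached under "tenant_a-SELECT ..."

    conn.schema_search_path = 'tenant_b'
    conn.exec_query('SELECT * FROM widgets LIMIT 1')  # different key, new prepared statement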