activerecord7-redshift-adapter-pennylane 1.0.1 → 1.0.2

Files changed (38)
  1. checksums.yaml +4 -4
  2. data/lib/active_record/connection_adapters/redshift_7_0/oid.rb +17 -0
  3. data/lib/active_record/connection_adapters/redshift_7_0_adapter.rb +770 -0
  4. data/lib/active_record/connection_adapters/redshift_7_1/array_parser.rb +92 -0
  5. data/lib/active_record/connection_adapters/redshift_7_1/column.rb +17 -0
  6. data/lib/active_record/connection_adapters/redshift_7_1/database_statements.rb +232 -0
  7. data/lib/active_record/connection_adapters/redshift_7_1/oid/date_time.rb +36 -0
  8. data/lib/active_record/connection_adapters/redshift_7_1/oid/decimal.rb +15 -0
  9. data/lib/active_record/connection_adapters/redshift_7_1/oid/json.rb +41 -0
  10. data/lib/active_record/connection_adapters/redshift_7_1/oid/jsonb.rb +25 -0
  11. data/lib/active_record/connection_adapters/redshift_7_1/oid/type_map_initializer.rb +62 -0
  12. data/lib/active_record/connection_adapters/redshift_7_1/oid.rb +17 -0
  13. data/lib/active_record/connection_adapters/redshift_7_1/quoting.rb +99 -0
  14. data/lib/active_record/connection_adapters/redshift_7_1/referential_integrity.rb +17 -0
  15. data/lib/active_record/connection_adapters/redshift_7_1/schema_definitions.rb +70 -0
  16. data/lib/active_record/connection_adapters/redshift_7_1/schema_dumper.rb +17 -0
  17. data/lib/active_record/connection_adapters/redshift_7_1/schema_statements.rb +424 -0
  18. data/lib/active_record/connection_adapters/redshift_7_1/type_metadata.rb +39 -0
  19. data/lib/active_record/connection_adapters/redshift_7_1/utils.rb +81 -0
  20. data/lib/active_record/connection_adapters/redshift_7_1_adapter.rb +769 -0
  21. data/lib/active_record/connection_adapters/redshift_adapter.rb +7 -768
  22. metadata +38 -19
  23. data/lib/active_record/connection_adapters/redshift/oid.rb +0 -17
  24. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/array_parser.rb +0 -0
  25. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/column.rb +0 -0
  26. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/database_statements.rb +0 -0
  27. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/oid/date_time.rb +0 -0
  28. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/oid/decimal.rb +0 -0
  29. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/oid/json.rb +0 -0
  30. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/oid/jsonb.rb +0 -0
  31. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/oid/type_map_initializer.rb +0 -0
  32. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/quoting.rb +0 -0
  33. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/referential_integrity.rb +0 -0
  34. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/schema_definitions.rb +0 -0
  35. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/schema_dumper.rb +0 -0
  36. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/schema_statements.rb +0 -0
  37. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/type_metadata.rb +0 -0
  38. /data/lib/active_record/connection_adapters/{redshift → redshift_7_0}/utils.rb +0 -0
data/lib/active_record/connection_adapters/redshift_adapter.rb
@@ -1,770 +1,9 @@
 # frozen_string_literal: true

-require 'active_record/connection_adapters/abstract_adapter'
-require 'active_record/connection_adapters/statement_pool'
-
-require 'active_record/connection_adapters/redshift/utils'
-require 'active_record/connection_adapters/redshift/column'
-require 'active_record/connection_adapters/redshift/oid'
-require 'active_record/connection_adapters/redshift/quoting'
-require 'active_record/connection_adapters/redshift/referential_integrity'
-require 'active_record/connection_adapters/redshift/schema_definitions'
-require 'active_record/connection_adapters/redshift/schema_dumper'
-require 'active_record/connection_adapters/redshift/schema_statements'
-require 'active_record/connection_adapters/redshift/type_metadata'
-require 'active_record/connection_adapters/redshift/database_statements'
-
-require 'active_record/tasks/database_tasks'
-
-require 'pg'
-
-require 'ipaddr'
-
-ActiveRecord::Tasks::DatabaseTasks.register_task(/redshift/, 'ActiveRecord::Tasks::PostgreSQLDatabaseTasks')
-
-module ActiveRecord
-  module ConnectionHandling # :nodoc:
-    RS_VALID_CONN_PARAMS = %i[host hostaddr port dbname user password connect_timeout
-                              client_encoding options application_name fallback_application_name
-                              keepalives keepalives_idle keepalives_interval keepalives_count
-                              tty sslmode requiressl sslcompression sslcert sslkey
-                              sslrootcert sslcrl requirepeer krbsrvname gsslib service].freeze
-
-    # Establishes a connection to the database that's used by all Active Record objects
-    def redshift_connection(config)
-      conn_params = config.symbolize_keys
-
-      conn_params.delete_if { |_, v| v.nil? }
-
-      # Map ActiveRecords param names to PGs.
-      conn_params[:user] = conn_params.delete(:username) if conn_params[:username]
-      conn_params[:dbname] = conn_params.delete(:database) if conn_params[:database]
-
-      # Forward only valid config params to PG::Connection.connect.
-      conn_params.keep_if { |k, _| RS_VALID_CONN_PARAMS.include?(k) }
-
-      # The postgres drivers don't allow the creation of an unconnected PG::Connection object,
-      # so just pass a nil connection object for the time being.
-      ConnectionAdapters::RedshiftAdapter.new(nil, logger, conn_params, config)
-    end
-  end
-
-  module ConnectionAdapters
-    # The PostgreSQL adapter works with the native C (https://bitbucket.org/ged/ruby-pg) driver.
-    #
-    # Options:
-    #
-    # * <tt>:host</tt> - Defaults to a Unix-domain socket in /tmp. On machines without Unix-domain sockets,
-    #   the default is to connect to localhost.
-    # * <tt>:port</tt> - Defaults to 5432.
-    # * <tt>:username</tt> - Defaults to be the same as the operating system name of the user running the application.
-    # * <tt>:password</tt> - Password to be used if the server demands password authentication.
-    # * <tt>:database</tt> - Defaults to be the same as the user name.
-    # * <tt>:schema_search_path</tt> - An optional schema search path for the connection given
-    #   as a string of comma-separated schema names. This is backward-compatible with the <tt>:schema_order</tt> option.
-    # * <tt>:encoding</tt> - An optional client encoding that is used in a <tt>SET client_encoding TO
-    #   <encoding></tt> call on the connection.
-    # * <tt>:min_messages</tt> - An optional client min messages that is used in a
-    #   <tt>SET client_min_messages TO <min_messages></tt> call on the connection.
-    # * <tt>:variables</tt> - An optional hash of additional parameters that
-    #   will be used in <tt>SET SESSION key = val</tt> calls on the connection.
-    # * <tt>:insert_returning</tt> - Does nothing for Redshift.
-    #
-    # Any further options are used as connection parameters to libpq. See
-    # http://www.postgresql.org/docs/9.1/static/libpq-connect.html for the
-    # list of parameters.
-    #
-    # In addition, default connection parameters of libpq can be set per environment variables.
-    # See http://www.postgresql.org/docs/9.1/static/libpq-envars.html .
-    class RedshiftAdapter < AbstractAdapter
-      ADAPTER_NAME = 'Redshift'
-
-      NATIVE_DATABASE_TYPES = {
-        primary_key: 'integer identity primary key',
-        string: { name: 'varchar' },
-        text: { name: 'varchar' },
-        integer: { name: 'integer' },
-        float: { name: 'decimal' },
-        decimal: { name: 'decimal' },
-        datetime: { name: 'timestamp' },
-        time: { name: 'timestamp' },
-        date: { name: 'date' },
-        bigint: { name: 'bigint' },
-        boolean: { name: 'boolean' }
-      }.freeze
-
-      OID = Redshift::OID # :nodoc:
-
-      include Redshift::Quoting
-      include Redshift::ReferentialIntegrity
-      include Redshift::SchemaStatements
-      include Redshift::DatabaseStatements
-
-      def schema_creation # :nodoc:
-        Redshift::SchemaCreation.new self
-      end
-
-      def supports_index_sort_order?
-        false
-      end
-
-      def supports_partial_index?
-        false
-      end
-
-      def supports_transaction_isolation?
-        false
-      end
-
-      def supports_foreign_keys?
-        true
-      end
-
-      def supports_deferrable_constraints?
-        false
-      end
-
-      def supports_views?
-        true
-      end
-
-      def supports_virtual_columns?
-        false
-      end
-
-      def index_algorithms
-        { concurrently: 'CONCURRENTLY' }
-      end
-
-      class StatementPool < ConnectionAdapters::StatementPool # :nodoc:
-        def initialize(connection, max)
-          super(max)
-          @connection = connection
-          @counter = 0
-        end
-
-        def next_key
-          "a#{@counter + 1}"
-        end
-
-        def []=(sql, key)
-          super.tap { @counter += 1 }
-        end
-
-        private
-
-        def dealloc(key)
-          @connection.query "DEALLOCATE #{key}" if connection_active?
-        rescue PG::Error
-        end
-
-        def connection_active?
-          @connection.status == PG::CONNECTION_OK
-        rescue PG::Error
-          false
-        end
-      end
-
-      # Initializes and connects a PostgreSQL adapter.
-      def initialize(connection, logger, connection_parameters, config)
-        super(connection, logger, config)
-
-        @visitor = Arel::Visitors::PostgreSQL.new self
-        @visitor.extend(ConnectionAdapters::DetermineIfPreparableVisitor) if defined?(ConnectionAdapters::DetermineIfPreparableVisitor)
-        @prepared_statements = false
-
-        @connection_parameters = connection_parameters
-
-        # @local_tz is initialized as nil to avoid warnings when connect tries to use it
-        @local_tz = nil
-        @table_alias_length = nil
-
-        connect
-        @statements = StatementPool.new @connection,
-                                        self.class.type_cast_config_to_integer(config[:statement_limit])
-
-        @type_map = Type::HashLookupTypeMap.new
-        initialize_type_map(type_map)
-        @local_tz = execute('SHOW TIME ZONE', 'SCHEMA').first['TimeZone']
-        @use_insert_returning = @config.key?(:insert_returning) ? self.class.type_cast_config_to_boolean(@config[:insert_returning]) : false
-      end
-
-      # Clears the prepared statements cache.
-      def clear_cache!(new_connection: false)
-        @statements.clear
-      end
-
-      def truncate(table_name, name = nil)
-        exec_query "TRUNCATE TABLE #{quote_table_name(table_name)}", name, []
-      end
-
-      # Is this connection alive and ready for queries?
-      def active?
-        @connection.query 'SELECT 1'
-        true
-      rescue PG::Error
-        false
-      end
-
-      def reload_type_map
-        type_map.clear
-        initialize_type_map
-      end
-
-      # Close then reopen the connection.
-      def reconnect!
-        super
-        @connection.reset
-        configure_connection
-        reload_type_map
-      end
-
-      def reset!
-        clear_cache!
-        reset_transaction
-        @connection.query 'ROLLBACK' unless @connection.transaction_status == ::PG::PQTRANS_IDLE
-        @connection.query 'DISCARD ALL'
-        configure_connection
-      end
-
-      # Disconnects from the database if already connected. Otherwise, this
-      # method does nothing.
-      def disconnect!
-        super
-        begin
-          @connection.close
-        rescue StandardError
-          nil
-        end
-      end
-
-      def native_database_types # :nodoc:
-        NATIVE_DATABASE_TYPES
-      end
-
-      # Returns true, since this connection adapter supports migrations.
-      def supports_migrations?
-        true
-      end
-
-      # Does PostgreSQL support finding primary key on non-Active Record tables?
-      def supports_primary_key? # :nodoc:
-        true
-      end
-
-      def supports_ddl_transactions?
-        true
-      end
-
-      def supports_explain?
-        true
-      end
-
-      def supports_extensions?
-        false
-      end
-
-      def supports_ranges?
-        false
-      end
-
-      def supports_materialized_views?
-        false
-      end
-
-      def supports_import?
-        true
-      end
-
-      def enable_extension(name); end
-
-      def disable_extension(name); end
-
-      def extension_enabled?(_name)
-        false
-      end
-
-      # Returns the configured supported identifier length supported by PostgreSQL
-      def table_alias_length
-        @table_alias_length ||= query('SHOW max_identifier_length', 'SCHEMA')[0][0].to_i
-      end
-
-      # Set the authorized user for this session
-      def session_auth=(user)
-        clear_cache!
-        exec_query "SET SESSION AUTHORIZATION #{user}"
-      end
-
-      def use_insert_returning?
-        false
-      end
-
-      def valid_type?(type)
-        !native_database_types[type].nil?
-      end
-
-      def update_table_definition(table_name, base) # :nodoc:
-        Redshift::Table.new(table_name, base)
-      end
-
-      def lookup_cast_type(sql_type) # :nodoc:
-        oid = execute("SELECT #{quote(sql_type)}::regtype::oid", 'SCHEMA').first['oid'].to_i
-        super(oid)
-      end
-
-      def column_name_for_operation(operation, _node) # :nodoc:
-        OPERATION_ALIASES.fetch(operation) { operation.downcase }
-      end
-
-      OPERATION_ALIASES = { # :nodoc:
-        'maximum' => 'max',
-        'minimum' => 'min',
-        'average' => 'avg'
-      }.freeze
-
-      protected
-
-      # Returns the version of the connected PostgreSQL server.
-      def redshift_version
-        @connection.server_version
-      end
-
-      def translate_exception(exception, message:, sql:, binds:)
-        return exception unless exception.respond_to?(:result)
-
-        case exception.message
-        when /duplicate key value violates unique constraint/
-          RecordNotUnique.new(message, exception)
-        when /violates foreign key constraint/
-          InvalidForeignKey.new(message, exception)
-        else
-          super
-        end
-      end
-
-      class << self
-        def initialize_type_map(m) # :nodoc:
-          m.register_type 'int2', Type::Integer.new(limit: 2)
-          m.register_type 'int4', Type::Integer.new(limit: 4)
-          m.register_type 'int8', Type::Integer.new(limit: 8)
-          m.alias_type 'oid', 'int2'
-          m.register_type 'float4', Type::Float.new
-          m.alias_type 'float8', 'float4'
-          m.register_type 'text', Type::Text.new
-          register_class_with_limit m, 'varchar', Type::String
-          m.alias_type 'char', 'varchar'
-          m.alias_type 'name', 'varchar'
-          m.alias_type 'bpchar', 'varchar'
-          m.register_type 'bool', Type::Boolean.new
-          m.alias_type 'timestamptz', 'timestamp'
-          m.register_type 'date', Type::Date.new
-          m.register_type 'time', Type::Time.new
-
-          m.register_type 'timestamp' do |_, _, sql_type|
-            precision = extract_precision(sql_type)
-            OID::DateTime.new(precision: precision)
-          end
-
-          m.register_type 'numeric' do |_, fmod, sql_type|
-            precision = extract_precision(sql_type)
-            scale = extract_scale(sql_type)
-
-            # The type for the numeric depends on the width of the field,
-            # so we'll do something special here.
-            #
-            # When dealing with decimal columns:
-            #
-            # places after decimal = fmod - 4 & 0xffff
-            # places before decimal = (fmod - 4) >> 16 & 0xffff
-            if fmod && (fmod - 4 & 0xffff) == 0
-              # FIXME: Remove this class, and the second argument to
-              # lookups on PG
-              Type::DecimalWithoutScale.new(precision: precision)
-            else
-              OID::Decimal.new(precision: precision, scale: scale)
-            end
-          end
-        end
-      end
-
-      private
-
-      def get_oid_type(oid, fmod, column_name, sql_type = '') # :nodoc:
-        load_additional_types(type_map, [oid]) unless type_map.key?(oid)
-
-        type_map.fetch(oid, fmod, sql_type) do
-          warn "unknown OID #{oid}: failed to recognize type of '#{column_name}'. It will be treated as String."
-          Type::Value.new.tap do |cast_type|
-            type_map.register_type(oid, cast_type)
-          end
-        end
-      end
-
-      def type_map
-        @type_map ||= Type::HashLookupTypeMap.new
-      end
-
-      def initialize_type_map(m = type_map)
-        self.class.initialize_type_map(m)
-        load_additional_types(m)
-      end
-
-      def extract_limit(sql_type) # :nodoc:
-        case sql_type
-        when /^bigint/i, /^int8/i
-          8
-        when /^smallint/i
-          2
-        else
-          super
-        end
-      end
-
-      # Extracts the value from a PostgreSQL column default definition.
-      def extract_value_from_default(default) # :nodoc:
-        case default
-        # Quoted types
-        when /\A[(B]?'(.*)'::/m
-          Regexp.last_match(1).gsub(/''/, "'")
-        # Boolean types
-        when 'true', 'false'
-          default
-        # Numeric types
-        when /\A\(?(-?\d+(\.\d*)?)\)?\z/
-          Regexp.last_match(1)
-        # Object identifier types
-        when /\A-?\d+\z/
-          Regexp.last_match(1)
-        else # rubocop:disable Style/EmptyElse
-          # Anything else is blank, some user type, or some function
-          # and we can't know the value of that, so return nil.
-          nil
-        end
-      end
-
-      def extract_default_function(default_value, default) # :nodoc:
-        default if has_default_function?(default_value, default)
-      end
-
-      def has_default_function?(default_value, default) # :nodoc:
-        !default_value && (/\w+\(.*\)/ === default)
-      end
-
-      def load_additional_types(type_map, oids = nil) # :nodoc:
-        initializer = OID::TypeMapInitializer.new(type_map)
-
-        load_types_queries(initializer, oids) do |query|
-          execute_and_clear(query, 'SCHEMA', []) do |records|
-            initializer.run(records)
-          end
-        end
-      end
-
-      def load_types_queries(_initializer, oids)
-        query =
-          if supports_ranges?
-            <<-SQL
-              SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, r.rngsubtype, t.typtype, t.typbasetype
-              FROM pg_type as t
-              LEFT JOIN pg_range as r ON oid = rngtypid
-            SQL
-          else
-            <<-SQL
-              SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, t.typtype, t.typbasetype
-              FROM pg_type as t
-            SQL
-          end
-
-        if oids
-          yield query + 'WHERE t.oid::integer IN (%s)' % oids.join(', ')
-        else
-          yield query
-        end
-      end
-
-      FEATURE_NOT_SUPPORTED = '0A000' # :nodoc:
-
-      def execute_and_clear(sql, name, binds, prepare: false, async: false)
-        result =
-          if without_prepared_statement?(binds)
-            exec_no_cache(sql, name, [])
-          elsif !prepare
-            exec_no_cache(sql, name, binds)
-          else
-            exec_cache(sql, name, binds)
-          end
-
-        ret = yield result
-        result.clear
-        ret
-      end
-
-      def exec_no_cache(sql, name, binds)
-        materialize_transactions
-
-        # make sure we carry over any changes to ActiveRecord.default_timezone that have been
-        # made since we established the connection
-        update_typemap_for_default_timezone
-
-        type_casted_binds = type_casted_binds(binds)
-        log(sql, name, binds, type_casted_binds) do
-          ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
-            @connection.exec_params(sql, type_casted_binds)
-          end
-        end
-      end
-
-      def exec_cache(sql, name, binds)
-        materialize_transactions
-        update_typemap_for_default_timezone
-
-        stmt_key = prepare_statement(sql, binds)
-        type_casted_binds = type_casted_binds(binds)
-
-        log(sql, name, binds, type_casted_binds, stmt_key) do
-          ActiveSupport::Dependencies.interlock.permit_concurrent_loads do
-            @connection.exec_prepared(stmt_key, type_casted_binds)
-          end
-        end
-      rescue ActiveRecord::StatementInvalid => e
-        raise unless is_cached_plan_failure?(e)
-        raise ActiveRecord::PreparedStatementCacheExpired, e.cause.message if in_transaction?
-
-        @lock.synchronize do
-          # outside of transactions we can simply flush this query and retry
-          @statements.delete sql_key(sql)
-        end
-
-        retry
-      end
-
-      # Annoyingly, the code for prepared statements whose return value may
-      # have changed is FEATURE_NOT_SUPPORTED.
-      #
-      # This covers various different error types so we need to do additional
-      # work to classify the exception definitively as a
-      # ActiveRecord::PreparedStatementCacheExpired
-      #
-      # Check here for more details:
-      # https://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/cache/plancache.c#l573
-      CACHED_PLAN_HEURISTIC = 'cached plan must not change result type'
-      def is_cached_plan_failure?(e)
-        pgerror = e.cause
-        code = pgerror.result.result_error_field(PG::PG_DIAG_SQLSTATE)
-        code == FEATURE_NOT_SUPPORTED && pgerror.message.include?(CACHED_PLAN_HEURISTIC)
-      rescue StandardError
-        false
-      end
-
-      # Returns the statement identifier for the client side cache
-      # of statements
-      def sql_key(sql)
-        "#{schema_search_path}-#{sql}"
-      end
-
-      # Prepare the statement if it hasn't been prepared, return
-      # the statement key.
-      def prepare_statement(sql, binds)
-        @lock.synchronize do
-          sql_key = sql_key(sql)
-          unless @statements.key? sql_key
-            nextkey = @statements.next_key
-            begin
-              @connection.prepare nextkey, sql
-            rescue StandardError => e
-              raise translate_exception_class(e, sql, binds)
-            end
-            # Clear the queue
-            @connection.get_last_result
-            @statements[sql_key] = nextkey
-          end
-          @statements[sql_key]
-        end
-      end
-
-      # Connects to a PostgreSQL server and sets up the adapter depending on the
-      # connected server's characteristics.
-      def connect
-        @connection = PG.connect(@connection_parameters)
-        configure_connection
-        add_pg_encoders
-        add_pg_decoders
-      end
-
-      # Configures the encoding, verbosity, schema search path, and time zone of the connection.
-      # This is called by #connect and should not be called manually.
-      def configure_connection
-        @connection.set_client_encoding(@config[:encoding]) if @config[:encoding]
-        self.schema_search_path = @config[:schema_search_path] || @config[:schema_order]
-
-        variables = @config.fetch(:variables, {}).stringify_keys
-
-        # If using Active Record's time zone support configure the connection to return
-        # TIMESTAMP WITH ZONE types in UTC.
-        unless variables['timezone']
-          if ActiveRecord.default_timezone == :utc
-            variables['timezone'] = 'UTC'
-          elsif @local_tz
-            variables['timezone'] = @local_tz
-          end
-        end
-
-        # SET statements from :variables config hash
-        # https://www.postgresql.org/docs/current/static/sql-set.html
-        variables.map do |k, v|
-          if [':default', :default].include?(v)
-            # Sets the value to the global or compile default
-            execute("SET #{k} TO DEFAULT", 'SCHEMA')
-          elsif !v.nil?
-            execute("SET #{k} TO #{quote(v)}", 'SCHEMA')
-          end
-        end
-      end
-
-      def last_insert_id_result(sequence_name) # :nodoc:
-        exec_query("SELECT currval('#{sequence_name}')", 'SQL')
-      end
-
-      # Returns the list of a table's column names, data types, and default values.
-      #
-      # The underlying query is roughly:
-      #  SELECT column.name, column.type, default.value
-      #  FROM column LEFT JOIN default
-      #  ON column.table_id = default.table_id
-      #  AND column.num = default.column_num
-      #  WHERE column.table_id = get_table_id('table_name')
-      #  AND column.num > 0
-      #  AND NOT column.is_dropped
-      #  ORDER BY column.num
-      #
-      # If the table name is not prefixed with a schema, the database will
-      # take the first match from the schema search path.
-      #
-      # Query implementation notes:
-      #  - format_type includes the column size constraint, e.g. varchar(50)
-      #  - ::regclass is a function that gives the id for a table name
-      def column_definitions(table_name) # :nodoc:
-        query(<<-END_SQL, 'SCHEMA')
-          SELECT a.attname, format_type(a.atttypid, a.atttypmod),
-                 pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod
-          FROM pg_attribute a LEFT JOIN pg_attrdef d
-            ON a.attrelid = d.adrelid AND a.attnum = d.adnum
-          WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass
-            AND a.attnum > 0 AND NOT a.attisdropped
-          ORDER BY a.attnum
-        END_SQL
-      end
-
-      def extract_table_ref_from_insert_sql(sql)
-        sql[/into\s("[A-Za-z0-9_."\[\]\s]+"|[A-Za-z0-9_."\[\]]+)\s*/im]
-        Regexp.last_match(1)&.strip
-      end
-
-      def arel_visitor
-        Arel::Visitors::PostgreSQL.new(self)
-      end
-
-      def build_statement_pool
-        StatementPool.new(@connection, self.class.type_cast_config_to_integer(@config[:statement_limit]))
-      end
-
-      def can_perform_case_insensitive_comparison_for?(column)
-        @case_insensitive_cache ||= {}
-        @case_insensitive_cache[column.sql_type] ||= begin
-          sql = <<~SQL
-            SELECT exists(
-              SELECT * FROM pg_proc
-              WHERE proname = 'lower'
-                AND proargtypes = ARRAY[#{quote column.sql_type}::regtype]::oidvector
-            ) OR exists(
-              SELECT * FROM pg_proc
-              INNER JOIN pg_cast
-                ON ARRAY[casttarget]::oidvector = proargtypes
-              WHERE proname = 'lower'
-                AND castsource = #{quote column.sql_type}::regtype
-            )
-          SQL
-          execute_and_clear(sql, 'SCHEMA', []) do |result|
-            result.getvalue(0, 0)
-          end
-        end
-      end
-
-      def add_pg_encoders
-        map = PG::TypeMapByClass.new
-        map[Integer] = PG::TextEncoder::Integer.new
-        map[TrueClass] = PG::TextEncoder::Boolean.new
-        map[FalseClass] = PG::TextEncoder::Boolean.new
-        @connection.type_map_for_queries = map
-      end
-
-      def update_typemap_for_default_timezone
-        return if @default_timezone == ActiveRecord.default_timezone || !@timestamp_decoder
-
-        decoder_class =
-          if ActiveRecord.default_timezone == :utc
-            PG::TextDecoder::TimestampUtc
-          else
-            PG::TextDecoder::TimestampWithoutTimeZone
-          end
-
-        @timestamp_decoder = decoder_class.new(@timestamp_decoder.to_h)
-        @connection.type_map_for_results.add_coder(@timestamp_decoder)
-        @default_timezone = ActiveRecord.default_timezone
-
-        # if default timezone has changed, we need to reconfigure the connection
-        # (specifically, the session time zone)
-        configure_connection
-      end
-
-      def add_pg_decoders
-        @default_timezone = nil
-        @timestamp_decoder = nil
-
-        coders_by_name = {
-          'int2' => PG::TextDecoder::Integer,
-          'int4' => PG::TextDecoder::Integer,
-          'int8' => PG::TextDecoder::Integer,
-          'oid' => PG::TextDecoder::Integer,
-          'float4' => PG::TextDecoder::Float,
-          'float8' => PG::TextDecoder::Float,
-          'bool' => PG::TextDecoder::Boolean
-        }
-
-        if defined?(PG::TextDecoder::TimestampUtc)
-          # Use native PG encoders available since pg-1.1
-          coders_by_name['timestamp'] = PG::TextDecoder::TimestampUtc
-          coders_by_name['timestamptz'] = PG::TextDecoder::TimestampWithTimeZone
-        end
-
-        known_coder_types = coders_by_name.keys.map { |n| quote(n) }
-        query = <<~SQL % known_coder_types.join(', ')
-          SELECT t.oid, t.typname
-          FROM pg_type as t
-          WHERE t.typname IN (%s)
-        SQL
-        coders = execute_and_clear(query, 'SCHEMA', []) do |result|
-          result.filter_map { |row| construct_coder(row, coders_by_name[row['typname']]) }
-        end
-
-        map = PG::TypeMapByOid.new
-        coders.each { |coder| map.add_coder(coder) }
-        @connection.type_map_for_results = map
-
-        # extract timestamp decoder for use in update_typemap_for_default_timezone
-        @timestamp_decoder = coders.find { |coder| coder.name == 'timestamp' }
-        update_typemap_for_default_timezone
-      end
-
-      def construct_coder(row, coder_class)
-        return unless coder_class
-
-        coder_class.new(oid: row['oid'].to_i, name: row['typname'])
-      end
-
-      def create_table_definition(*args) # :nodoc:
-        Redshift::TableDefinition.new(self, *args)
-      end
-    end
-  end
-end
+if ActiveRecord.version >= Gem::Version.new('7.1.0')
+  require_relative 'redshift_7_1_adapter'
+elsif ActiveRecord.version >= Gem::Version.new('7.0.0')
+  require_relative 'redshift_7_0_adapter'
+else
+  raise 'no compatible version of ActiveRecord detected'
+end