sequel 4.14.0 → 4.15.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG +32 -0
- data/README.rdoc +3 -3
- data/Rakefile +1 -1
- data/doc/opening_databases.rdoc +20 -2
- data/doc/release_notes/4.15.0.txt +56 -0
- data/doc/testing.rdoc +10 -4
- data/lib/sequel/adapters/fdbsql.rb +285 -0
- data/lib/sequel/adapters/informix.rb +15 -0
- data/lib/sequel/adapters/jdbc/fdbsql.rb +65 -0
- data/lib/sequel/adapters/mock.rb +1 -0
- data/lib/sequel/adapters/shared/fdbsql.rb +550 -0
- data/lib/sequel/adapters/shared/postgres.rb +23 -10
- data/lib/sequel/database/connecting.rb +1 -1
- data/lib/sequel/database/schema_methods.rb +10 -3
- data/lib/sequel/dataset/placeholder_literalizer.rb +7 -0
- data/lib/sequel/extensions/date_arithmetic.rb +5 -0
- data/lib/sequel/extensions/migration.rb +2 -2
- data/lib/sequel/extensions/pg_array.rb +15 -1
- data/lib/sequel/extensions/pg_json.rb +3 -0
- data/lib/sequel/extensions/pg_json_ops.rb +4 -4
- data/lib/sequel/extensions/schema_dumper.rb +9 -1
- data/lib/sequel/model/associations.rb +70 -21
- data/lib/sequel/plugins/active_model.rb +7 -2
- data/lib/sequel/plugins/many_through_many.rb +1 -0
- data/lib/sequel/plugins/pg_array_associations.rb +2 -1
- data/lib/sequel/plugins/split_values.rb +64 -0
- data/lib/sequel/version.rb +1 -1
- data/spec/adapters/fdbsql_spec.rb +429 -0
- data/spec/adapters/informix_spec.rb +6 -0
- data/spec/adapters/postgres_spec.rb +49 -1
- data/spec/adapters/spec_helper.rb +6 -1
- data/spec/adapters/sqlite_spec.rb +1 -1
- data/spec/core/placeholder_literalizer_spec.rb +10 -0
- data/spec/extensions/date_arithmetic_spec.rb +7 -0
- data/spec/extensions/many_through_many_spec.rb +14 -0
- data/spec/extensions/migration_spec.rb +3 -3
- data/spec/extensions/pg_array_associations_spec.rb +9 -0
- data/spec/extensions/pg_json_ops_spec.rb +4 -8
- data/spec/extensions/schema_dumper_spec.rb +9 -0
- data/spec/extensions/spec_helper.rb +3 -0
- data/spec/extensions/split_values_spec.rb +22 -0
- data/spec/integration/database_test.rb +1 -1
- data/spec/integration/dataset_test.rb +1 -1
- data/spec/integration/eager_loader_test.rb +1 -1
- data/spec/integration/plugin_test.rb +3 -2
- data/spec/integration/prepared_statement_test.rb +3 -3
- data/spec/integration/schema_test.rb +3 -3
- data/spec/integration/spec_helper.rb +6 -1
- data/spec/integration/timezone_test.rb +1 -1
- data/spec/model/association_reflection_spec.rb +29 -0
- data/spec/model/associations_spec.rb +36 -0
- data/spec/model/eager_loading_spec.rb +14 -0
- data/spec/model/spec_helper.rb +3 -0
- data/spec/rspec_helper.rb +4 -0
- metadata +10 -2
@@ -13,11 +13,26 @@ module Sequel
|
|
13
13
|
::Informix.connect(opts[:database], opts[:user], opts[:password])
|
14
14
|
end
|
15
15
|
|
16
|
+
# Informix can be run with transaction logging disabled; when the
# Database was created with the :nolog option there is no transaction
# support, so simply yield to the block instead of opening a
# transaction. Otherwise defer to the default transaction handling.
def transaction(opts=OPTS)
  return yield if @opts[:nolog]
  super
end
|
23
|
+
|
16
24
|
# Returns number of rows affected
def execute_dui(sql, opts=OPTS)
  # Check out a connection for opts[:server], log the SQL, and run it
  # via the Informix "immediate" (non-cursor) execution path.
  synchronize(opts[:server]){|c| log_yield(sql){c.immediate(sql)}}
end

# Execute an INSERT and return the generated key value.
# NOTE(review): dbinfo('sqlca.sqlerrd1') appears to expose the last
# SERIAL value for the session — confirm against the Informix docs.
def execute_insert(sql, opts=OPTS)
  synchronize(opts[:server]){|c|
    log_yield(sql){c.immediate(sql)}
    # "first 1 ... from systables" is only a carrier row so dbinfo can be selected
    c.cursor(%q{select first 1 dbinfo('sqlca.sqlerrd1') from systables}).open.fetch
  }
end

# Execute the given SQL, yielding an opened cursor for it to the block.
def execute(sql, opts=OPTS)
  synchronize(opts[:server]){|c| yield log_yield(sql){c.cursor(sql)}}
end
|
@@ -0,0 +1,65 @@
|
|
1
|
+
Sequel::JDBC.load_driver('com.foundationdb.sql.jdbc.Driver')
Sequel.require 'adapters/shared/fdbsql'

module Sequel
  # JDBC errors surface as NativeException; register it so the shared
  # fdbsql code converts it into Sequel::DatabaseError subclasses.
  Fdbsql::CONVERTED_EXCEPTIONS << NativeException

  module JDBC
    Sequel.synchronize do
      # Register the :fdbsql subadapter; the proc returns the driver class.
      DATABASE_SETUP[:fdbsql] = proc do |db|
        db.extend(Sequel::JDBC::Fdbsql::DatabaseMethods)
        db.dataset_class = Sequel::JDBC::Fdbsql::Dataset
        com.foundationdb.sql.jdbc.Driver
      end
    end

    # Adapter, Database, and Dataset support for accessing the FoundationDB SQL Layer
    # via JDBC
    module Fdbsql
      # Methods to add to Database instances that access Fdbsql via
      # JDBC.
      module DatabaseMethods
        extend Sequel::Database::ResetIdentifierMangling
        include Sequel::Fdbsql::DatabaseMethods

        # Add the primary_keys and primary_key_sequences instance variables,
        # so we can get the correct return values for inserted rows.
        def self.extended(db)
          super
          db.send(:adapter_initialize)
        end

        private

        # Connection-loss messages reported by the JDBC driver.
        DISCONNECT_ERROR_RE = /\A(?:This connection has been closed|An I\/O error occurred while sending to the backend)/
        def disconnect_error?(exception, opts)
          super || exception.message =~ DISCONNECT_ERROR_RE
        end

        # Use the driver-provided SQLState when available so the shared
        # code can map it to the right DatabaseError subclass.
        def database_exception_sqlstate(exception, opts)
          if exception.respond_to?(:sql_state)
            exception.sql_state
          end
        end
      end

      # Methods to add to Dataset instances that access the FoundationDB SQL Layer via
      # JDBC.
      class Dataset < JDBC::Dataset
        include Sequel::Fdbsql::DatasetMethods

        # Add the shared Fdbsql prepared statement methods
        def prepare(type, name=nil, *values)
          ps = to_prepared_statement(type, values)
          ps.extend(JDBC::Dataset::PreparedStatementMethods)
          # Extend with the fdbsql module second so its overrides win.
          ps.extend(::Sequel::Fdbsql::DatasetMethods::PreparedStatementMethods)
          if name
            ps.prepared_statement_name = name
            db.set_prepared_statement(name, ps)
          end
          ps
        end
      end
    end
  end
end
|
data/lib/sequel/adapters/mock.rb
CHANGED
@@ -0,0 +1,550 @@
|
|
1
|
+
Sequel.require 'adapters/utils/pg_types'
|
2
|
+
|
3
|
+
module Sequel
|
4
|
+
|
5
|
+
# Top level module for holding all FoundationDB SQL Layer related modules and
|
6
|
+
# classes for Sequel.
|
7
|
+
module Fdbsql
|
8
|
+
|
9
|
+
# Array of exceptions that need to be converted. JDBC
|
10
|
+
# uses NativeExceptions, the native adapter uses PGError.
|
11
|
+
CONVERTED_EXCEPTIONS = []
|
12
|
+
|
13
|
+
# Methods shared by Database instances that connect to
|
14
|
+
# the FoundationDB SQL Layer
|
15
|
+
module DatabaseMethods
|
16
|
+
|
17
|
+
# A hash of conversion procs, keyed by type integer (oid) and
# having callable values for the conversion proc for that type.
# Populated (and frozen) in the private adapter_initialize method.
attr_reader :conversion_procs
|
20
|
+
|
21
|
+
# Convert given argument so that it can be used directly by pg. Currently, pg doesn't
# handle fractional seconds in Time/DateTime or blobs with "\0", and it won't ever
# handle Sequel::SQLTime values correctly. Only public for use by the adapter, shouldn't
# be used by external code.
#
# arg  :: the Ruby value being bound
# conn :: the underlying connection (not referenced in this implementation)
def bound_variable_arg(arg, conn)
  case arg
  # TODO TDD it:
  when Sequel::SQL::Blob
    # the 1 means treat this as a binary blob
    {:value => arg, :format => 1}
  when Sequel::SQLTime
    # the literal methods put quotes around things, but this is a bound variable, so we can't use those
    arg.strftime(BOUND_VARIABLE_SQLTIME_FORMAT)
  when DateTime, Time
    # the literal methods put quotes around things, but this is a bound variable, so we can't use those
    from_application_timestamp(arg).strftime(BOUND_VARIABLE_TIMESTAMP_FORMAT)
  else
    # All other values are passed through untouched.
    arg
  end
end
|
41
|
+
|
42
|
+
# The fdbsql adapter identifies itself with the :fdbsql database type.
def database_type
  :fdbsql
end

# Like PostgreSQL, fdbsql uses the SERIAL pseudo-type rather than
# AUTOINCREMENT for auto-incrementing primary keys.
def serial_primary_key_options
  {:type => Integer, :serial => true, :primary_key => true}
end

# Index names are namespaced per table, not globally.
def global_index_namespace?
  false
end
|
57
|
+
|
58
|
+
# Return primary key for the given table.
# Result is nil when the table has no primary key, a single symbol for a
# one-column key, and an array of symbols for a composite key.
def primary_key(table_name, opts=OPTS)
  quoted_table = quote_schema_table(table_name)
  # Serve from the per-Database cache when possible (cache writes/reads
  # are wrapped in Sequel.synchronize for thread safety).
  Sequel.synchronize{return @primary_keys[quoted_table] if @primary_keys.has_key?(quoted_table)}
  out_identifier, in_identifier = identifier_convertors(opts)
  schema, table = schema_or_current_and_table(table_name, opts)
  # Look up the PRIMARY KEY constraint columns via information_schema.
  dataset = metadata_dataset.
    select(:kc__column_name).
    from(Sequel.as(:information_schema__key_column_usage, 'kc')).
    join(Sequel.as(:information_schema__table_constraints, 'tc'),
         [:table_name, :table_schema, :constraint_name]).
    where(:kc__table_name => in_identifier.call(table),
          :kc__table_schema => schema,
          :tc__constraint_type => 'PRIMARY KEY')
  value = dataset.map do |row|
    out_identifier.call(row.delete(:column_name))
  end
  # Collapse to nil / single symbol / array depending on column count.
  value = case value.size
    when 0 then nil
    when 1 then value.first
    else value
    end
  Sequel.synchronize{@primary_keys[quoted_table] = value}
end
|
82
|
+
|
83
|
+
# The SQL Layer understands CREATE TABLE IF NOT EXISTS.
def supports_create_table_if_not_exists?
  true
end

# Foreign key constraints may be declared DEFERRABLE.
def supports_deferrable_foreign_key_constraints?
  true
end

# The SQL Layer understands DROP TABLE IF EXISTS.
def supports_drop_table_if_exists?
  true
end
|
97
|
+
|
98
|
+
# Array of symbols specifying table names in the current database.
# The dataset used is yielded to the block if one is provided,
# otherwise, an array of symbols of table names is returned.
#
# Options:
# :qualify :: Return the tables as Sequel::SQL::QualifiedIdentifier instances,
#             using the schema the table is located in as the qualifier.
# :schema :: The schema to search
# :server :: The server to use
def tables(opts=OPTS, &block)
  table_type = 'TABLE'
  tables_or_views(table_type, opts, &block)
end

# Array of symbols specifying view names in the current database.
# Accepts the same options and optional block as #tables.
#
# Options:
# :qualify :: Return the views as Sequel::SQL::QualifiedIdentifier instances,
#             using the schema the view is located in as the qualifier.
# :schema :: The schema to search
# :server :: The server to use
def views(opts=OPTS, &block)
  view_type = 'VIEW'
  tables_or_views(view_type, opts, &block)
end
|
121
|
+
|
122
|
+
# Return full foreign key information, including
# Postgres returns hash like:
# {"b_e_fkey"=> {:name=>:b_e_fkey, :columns=>[:e], :on_update=>:no_action, :on_delete=>:no_action, :deferrable=>false, :table=>:a, :key=>[:c]}}
def foreign_key_list(table, opts=OPTS)
  out_identifier, in_identifier = identifier_convertors(opts)
  schema, table = schema_or_current_and_table(table, opts)
  sql_table = in_identifier.call(table)
  # First query: one row per referencing column of each FK constraint.
  columns_dataset = metadata_dataset.
    select(:tc__table_name___table_name,
           :tc__table_schema___table_schema,
           :tc__is_deferrable___deferrable,
           :kc__column_name___column_name,
           :kc__constraint_schema___schema,
           :kc__constraint_name___name,
           :rc__update_rule___on_update,
           :rc__delete_rule___on_delete).
    from(Sequel.as(:information_schema__table_constraints, 'tc')).
    join(Sequel.as(:information_schema__key_column_usage, 'kc'),
         [:constraint_schema, :constraint_name]).
    join(Sequel.as(:information_schema__referential_constraints, 'rc'),
         [:constraint_name, :constraint_schema]).
    where(:tc__table_name => sql_table,
          :tc__table_schema => schema,
          :tc__constraint_type => 'FOREIGN KEY')

  # Second query: one row per referenced (key) column, found by joining
  # through the unique constraint the FK points at.
  keys_dataset = metadata_dataset.
    select(:rc__constraint_schema___schema,
           :rc__constraint_name___name,
           :kc__table_name___key_table,
           :kc__column_name___key_column).
    from(Sequel.as(:information_schema__table_constraints, 'tc')).
    join(Sequel.as(:information_schema__referential_constraints, 'rc'),
         [:constraint_schema, :constraint_name]).
    join(Sequel.as(:information_schema__key_column_usage, 'kc'),
         :kc__constraint_schema => :rc__unique_constraint_schema,
         :kc__constraint_name => :rc__unique_constraint_name).
    where(:tc__table_name => sql_table,
          :tc__table_schema => schema,
          :tc__constraint_type => 'FOREIGN KEY')
  foreign_keys = {}
  columns_dataset.each do |row|
    # fetch with a block: initializes the entry on the first column row
    # of each constraint, reusing that row hash as the result entry.
    foreign_key = foreign_keys.fetch(row[:name]) do |key|
      foreign_keys[row[:name]] = row
      row[:name] = out_identifier.call(row[:name])
      row[:columns] = []
      row[:key] = []
      row
    end
    foreign_key[:columns] << out_identifier.call(row[:column_name])
  end
  keys_dataset.each do |row|
    foreign_key = foreign_keys[row[:name]]
    foreign_key[:table] = out_identifier.call(row[:key_table])
    foreign_key[:key] << out_identifier.call(row[:key_column])
  end
  foreign_keys.values
end

# Return indexes for the table
# postgres returns:
# {:blah_blah_index=>{:columns=>[:n], :unique=>true, :deferrable=>nil},
#  :items_n_a_index=>{:columns=>[:n, :a], :unique=>false, :deferrable=>nil}}
def indexes(table, opts=OPTS)
  out_identifier, in_identifier = identifier_convertors(opts)
  schema, table = schema_or_current_and_table(table, opts)
  dataset = metadata_dataset.
    select(:is__is_unique,
           Sequel.as({:is__is_unique => 'YES'}, 'unique'),
           :is__index_name,
           :ic__column_name).
    from(Sequel.as(:information_schema__indexes, 'is')).
    join(Sequel.as(:information_schema__index_columns, 'ic'),
         :ic__index_table_schema => :is__table_schema,
         :ic__index_table_name => :is__table_name,
         :ic__index_name => :is__index_name).
    where(:is__table_schema => schema,
          :is__table_name => in_identifier.call(table)).
    exclude(:is__index_type => 'PRIMARY')  # primary key indexes are excluded
  indexes = {}
  dataset.each do |row|
    # Group the per-column rows by index name.
    index = indexes.fetch(out_identifier.call(row[:index_name])) do |key|
      h = { :unique => row[:unique], :columns => [] }
      indexes[key] = h
      h
    end
    index[:columns] << out_identifier.call(row[:column_name])
  end
  indexes
end
|
211
|
+
|
212
|
+
private

# the literal methods put quotes around things, but when we bind a variable there shouldn't be quotes around it
# it should just be the timestamp, so we need whole new formats here.
BOUND_VARIABLE_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%S".freeze
BOUND_VARIABLE_SQLTIME_FORMAT = "%H:%M:%S".freeze

# Set up the per-Database state used by this adapter: the primary key
# cache and the oid => conversion proc table.
def adapter_initialize
  @primary_keys = {}
  # Postgres supports named types in the db, if we want to support anything that's not built in, this
  # will have to be changed to not be a constant
  @conversion_procs = Sequel::Postgres::PG_TYPES.dup
  # oid 16 is the boolean type; the server sends 'true'/'false' strings.
  @conversion_procs[16] = Proc.new {|s| s == 'true'}
  # oids 1114/1184 (timestamp with/without time zone) go through the
  # Database's application timestamp conversion.
  @conversion_procs[1184] = @conversion_procs[1114] = method(:to_application_timestamp)
  @conversion_procs.freeze
end
|
228
|
+
|
229
|
+
# Generate the SQL fragment for a single ALTER TABLE operation.
#
# Handles the fdbsql-specific forms for changing a column's data type
# (SET DATA TYPE) and toggling its NULL/NOT NULL constraint; any other
# operation falls through to the default implementation.
def alter_table_op_sql(table, op)
  quoted_name = quote_identifier(op[:name]) if op[:name]
  case op[:op]
  when :set_column_type
    "ALTER COLUMN #{quoted_name} SET DATA TYPE #{type_literal(op)}"
  when :set_column_null
    # Build the nullability keyword atomically: the previous
    # "#{op[:null] ? '' : 'NOT'} NULL" form emitted a stray extra
    # space ("col  NULL") when allowing NULLs.
    "ALTER COLUMN #{quoted_name} #{op[:null] ? 'NULL' : 'NOT NULL'}"
  else
    super
  end
end
|
240
|
+
|
241
|
+
# Run the given block, converting any exception it raises into the
# appropriate Sequel::DatabaseError via raise_error.
def check_database_errors
  yield
rescue => e
  raise_error(e, :classes=>CONVERTED_EXCEPTIONS)
end
|
249
|
+
|
250
|
+
# fdbsql's schema parser hands back default values as plain, unquoted,
# unescaped strings (e.g. "my 'default' value"), unlike the base
# implementation which expects the literal form "'my ''default'' value'",
# so the value can be returned untouched.
def column_schema_normalize_default(default, type)
  default
end
|
257
|
+
|
258
|
+
# FDBSQL requires parentheses around the SELECT and the WITH DATA
# suffix when creating a table from a query.
def create_table_as_sql(name, sql, options)
  prefix = create_table_prefix_sql(name, options)
  "#{prefix} AS (#{sql}) WITH DATA"
end
|
262
|
+
|
263
|
+
# Exception classes this adapter converts into DatabaseErrors
# (the shared CONVERTED_EXCEPTIONS array; the same array object is
# appended to by the native and JDBC subadapters).
def database_error_classes
  CONVERTED_EXCEPTIONS
end

# NOTE(review): STALE_STATEMENT_SQLSTATE is defined here but not
# referenced in this file chunk — presumably used by the native adapter.
STALE_STATEMENT_SQLSTATE = '0A50A'.freeze
NOT_NULL_CONSTRAINT_SQLSTATES = %w'23502'.freeze.each{|s| s.freeze}
FOREIGN_KEY_CONSTRAINT_SQLSTATES = %w'23503 23504'.freeze.each{|s| s.freeze}
UNIQUE_CONSTRAINT_SQLSTATES = %w'23501'.freeze.each{|s| s.freeze}

# Given the SQLState, return the appropriate DatabaseError subclass.
# Returns nil when the state doesn't match any known constraint class.
def database_specific_error_class_from_sqlstate(sqlstate)
  # There is also a CheckConstraintViolation in Sequel, but the sql layer doesn't support check constraints
  case sqlstate
  when *NOT_NULL_CONSTRAINT_SQLSTATES
    NotNullConstraintViolation
  when *FOREIGN_KEY_CONSTRAINT_SQLSTATES
    ForeignKeyConstraintViolation
  when *UNIQUE_CONSTRAINT_SQLSTATES
    UniqueConstraintViolation
  end
end

# This is a fallback used by the base class if the sqlstate fails to figure out
# what error type it is.
DATABASE_ERROR_REGEXPS = [
  # Add this check first, since otherwise it's possible for users to control
  # which exception class is generated.
  [/invalid input syntax/, DatabaseError],
  # the rest of these are backups in case the sqlstate fails
  [/[dD]uplicate key violates unique constraint/, UniqueConstraintViolation],
  [/due (?:to|for) foreign key constraint/, ForeignKeyConstraintViolation],
  [/NULL value not permitted/, NotNullConstraintViolation],
].freeze

# Message-pattern fallback table used when no SQLState is available.
def database_error_regexps
  DATABASE_ERROR_REGEXPS
end
|
300
|
+
|
301
|
+
# Return the [output, input] identifier conversion procs appropriate
# for the dataset in the given options hash.
def identifier_convertors(opts=OPTS)
  ds = opts[:dataset]
  [output_identifier_meth(ds), input_identifier_meth(ds)]
end

# Like PostgreSQL, fdbsql folds unquoted identifiers to lowercase, so
# identifiers don't need to be upcased on the way in.
def identifier_input_method_default
  nil
end

# Like PostgreSQL, fdbsql folds unquoted identifiers to lowercase, so
# identifiers don't need to be upcased on the way out.
def identifier_output_method_default
  nil
end
|
314
|
+
|
315
|
+
# Report a DECIMAL type with scale 0 as an integer; every other
# type string is returned unchanged.
def normalize_decimal_to_integer(type, scale)
  (type == 'DECIMAL' && scale == 0) ? 'integer' : type
end
|
323
|
+
|
324
|
+
# Remove the cached entries for primary keys and sequences when a table is
# changed.
def remove_cached_schema(table)
  tab = quote_schema_table(table)
  Sequel.synchronize do
    @primary_keys.delete(tab)
  end
  super
end

# Split an optionally qualified table reference into [schema, table],
# defaulting the schema to CURRENT_SCHEMA (or opts[:schema] when given).
def schema_or_current_and_table(table, opts=OPTS)
  schema, table = schema_and_table(table)
  schema = opts.fetch(:schema, schema || Sequel.lit('CURRENT_SCHEMA'))
  [schema, table]
end

# returns an array of column information with each column being of the form:
# [:column_name, {:db_type=>"integer", :default=>nil, :allow_null=>false, :primary_key=>true, :type=>:integer}]
def schema_parse_table(table, opts = {})
  out_identifier, in_identifier = identifier_convertors(opts)
  schema, table = schema_or_current_and_table(table, opts)
  # Columns joined against primary key constraint info; the right outer
  # join keeps columns that are not part of any primary key.
  dataset = metadata_dataset.
    select(:c__column_name,
           Sequel.as({:c__is_nullable => 'YES'}, 'allow_null'),
           :c__column_default___default,
           :c__data_type___db_type,
           :c__character_maximum_length___max_length,
           :c__numeric_scale,
           Sequel.as({:tc__constraint_type => 'PRIMARY KEY'}, 'primary_key')).
    from(Sequel.as(:information_schema__key_column_usage, 'kc')).
    join(Sequel.as(:information_schema__table_constraints, 'tc'),
         :tc__constraint_type => 'PRIMARY KEY',
         :tc__table_name => :kc__table_name,
         :tc__table_schema => :kc__table_schema,
         :tc__constraint_name => :kc__constraint_name).
    right_outer_join(Sequel.as(:information_schema__columns, 'c'),
                     [:table_name, :table_schema, :column_name]).
    where(:c__table_name => in_identifier.call(table),
          :c__table_schema => schema)
  dataset.map do |row|
    # Blank defaults mean "no default"; DECIMAL scale 0 maps to integer.
    row[:default] = nil if blank_object?(row[:default])
    row[:type] = schema_column_type(normalize_decimal_to_integer(row[:db_type], row[:numeric_scale]))
    [out_identifier.call(row.delete(:column_name)), row]
  end
end

# Shared implementation for #tables and #views: type is the
# information_schema table_type to filter on ('TABLE' or 'VIEW').
def tables_or_views(type, opts, &block)
  schema = opts[:schema] || Sequel.lit('CURRENT_SCHEMA')
  m = output_identifier_meth
  dataset = metadata_dataset.server(opts[:server]).select(:table_name).
    from(Sequel.qualify('information_schema','tables')).
    where(:table_schema => schema,
          :table_type => type)
  if block_given?
    yield(dataset)
  elsif opts[:qualify]
    dataset.select_append(:table_schema).map{|r| Sequel.qualify(m.call(r[:table_schema]), m.call(r[:table_name])) }
  else
    dataset.map{|r| m.call(r[:table_name])}
  end
end
|
385
|
+
|
386
|
+
# Use the bigserial type when the :serial column option is set,
# otherwise fall back to the generic bignum handling.
def type_literal_generic_bignum(column)
  return :bigserial if column[:serial]
  super
end

# Use the serial type when the :serial column option is set,
# otherwise fall back to the generic integer handling.
def type_literal_generic_integer(column)
  return :serial if column[:serial]
  super
end
|
395
|
+
|
396
|
+
end
|
397
|
+
|
398
|
+
# Instance methods for datasets that connect to the FoundationDB SQL Layer.
|
399
|
+
module DatasetMethods
|
400
|
+
|
401
|
+
# Declare the clause ordering used when generating DELETE/INSERT/UPDATE
# SQL, including the RETURNING clause and USING/FROM multi-table support.
Dataset.def_sql_method(self, :delete, %w'with delete from using where returning')
Dataset.def_sql_method(self, :insert, %w'with insert into columns values returning')
Dataset.def_sql_method(self, :update, %w'with update table set from where returning')
|
404
|
+
|
405
|
+
# Shared methods for prepared statements used with the FoundationDB SQL Layer
module PreparedStatementMethods

  # Memoized prepared SQL. For :insert statements, force a RETURNING
  # clause with the table's primary key before super builds the SQL,
  # so the generated key can be read back.
  def prepared_sql
    return @prepared_sql if @prepared_sql
    @opts[:returning] = insert_pk if @prepared_type == :insert
    super
    @prepared_sql
  end

  # Override insert action to use RETURNING if the server supports it.
  def run
    if @prepared_type == :insert
      # Return the first value of the first RETURNING row (the PK).
      fetch_rows(prepared_sql){|r| return r.values.first}
    else
      super
    end
  end
end
|
424
|
+
|
425
|
+
# Emulate the bitwise operators, and translate the Postgres-style
# regexp match operators into the SQL Layer's REGEX/IREGEX functions.
def complex_expression_sql_append(sql, op, args)
  case op
  when :&, :|, :^, :<<, :>>, :'B~'
    # Bitwise ops are emulated via the shared helper.
    complex_expression_emulate_append(sql, op, args)
  # REGEXP_OPERATORS = [:~, :'!~', :'~*', :'!~*']
  when :'~'
    # case-sensitive match
    function_sql_append(sql, SQL::Function.new(:REGEX, args.at(0), args.at(1)))
  when :'!~'
    # negated case-sensitive match: NOT REGEX(...)
    sql << Sequel::Dataset::NOT_SPACE
    function_sql_append(sql, SQL::Function.new(:REGEX, args.at(0), args.at(1)))
  when :'~*'
    # case-insensitive match
    function_sql_append(sql, SQL::Function.new(:IREGEX, args.at(0), args.at(1)))
  when :'!~*'
    # negated case-insensitive match: NOT IREGEX(...)
    sql << Sequel::Dataset::NOT_SPACE
    function_sql_append(sql, SQL::Function.new(:IREGEX, args.at(0), args.at(1)))
  else
    super
  end
end
|
445
|
+
|
446
|
+
# Insert given values into the database. Unless RETURNING is disabled
# (or raw SQL is used), the primary key of the inserted row is returned.
def insert(*values)
  if @opts[:returning]
    # Already know which columns to return, let the standard code handle it
    super
  elsif @opts[:sql] || @opts[:disable_insert_returning]
    # Raw SQL used or RETURNING disabled, just use the default behavior
    # and return nil since sequence is not known.
    super
    nil
  else
    # Force the use of RETURNING with the primary key value,
    # unless it has been disabled.
    returning(*insert_pk).insert(*values){|r| return r.values.first}
  end
end

# Insert a record returning the record inserted. Always returns nil without
# inserting a query if disable_insert_returning is used.
def insert_select(*values)
  unless @opts[:disable_insert_returning]
    # Keep an existing RETURNING clause, otherwise return all columns.
    ds = opts[:returning] ? self : returning
    ds.insert(*values){|r| return r}
  end
end

# The SQL to use for an insert_select, adds a RETURNING clause to the insert
# unless the RETURNING clause is already present.
def insert_select_sql(*values)
  ds = opts[:returning] ? self : returning
  ds.insert_sql(*values)
end
|
478
|
+
|
479
|
+
# FDBSQL provides REGEX/IREGEX functions, so regexp matching works.
def supports_regexp?
  true
end

# RETURNING is available for every statement type.
def supports_returning?(type)
  true
end

# Timestamps lose their fractional seconds, so usecs are unsupported.
def supports_timestamp_usecs?
  false
end

# Function names may be quoted.
def supports_quoted_function_names?
  true
end
|
498
|
+
|
499
|
+
private
|
500
|
+
|
501
|
+
# Use USING to specify additional tables in a delete query
def delete_using_sql(sql)
  join_from_sql(:USING, sql)
end

# Return the primary key to use for RETURNING in an INSERT statement.
# Only simple table references (symbols, strings, identifiers) are
# resolved; anything else (subselects, joins) yields nil.
def insert_pk
  if (f = opts[:from]) && !f.empty?
    case t = f.first
    when Symbol, String, SQL::Identifier, SQL::QualifiedIdentifier
      if pk = db.primary_key(t)
        pk
      end
    end
  end
  # implicit nil when there is no FROM table or no primary key
end

# For multiple table support, PostgreSQL requires at least
# two from tables, with joins allowed.
# Appends "<type> table_list [joins]" to sql (mutates the sql string).
def join_from_sql(type, sql)
  if(from = @opts[:from][1..-1]).empty?
    raise(Error, 'Need multiple FROM tables if updating/deleting a dataset with JOINs') if @opts[:join]
  else
    sql << SPACE << type.to_s << SPACE
    source_list_append(sql, from)
    select_join_sql(sql)
  end
end
|
529
|
+
|
530
|
+
# Append the literal form of a blob to sql: FDBSQL uses an x-prefixed
# hex string (x'...'); an empty blob becomes an empty string literal.
def literal_blob_append(sql, v)
  return sql << "''" if v.empty?
  hex = v.unpack('H*').first
  sql << "x'#{hex}'"
end
|
538
|
+
|
539
|
+
# fdbsql has no FOR UPDATE clause (it's unnecessary with its
# transaction model), so a :update lock request is a no-op; other
# lock styles are handled by the default implementation.
def select_lock_sql(sql)
  if @opts[:lock] == :update
    sql
  else
    super
  end
end

# Use FROM to specify additional tables in an update query.
def update_from_sql(sql)
  join_from_sql(:FROM, sql)
end
|
548
|
+
end
|
549
|
+
end
|
550
|
+
end
|