torque-postgresql 3.0.1 → 3.2.0
- checksums.yaml +4 -4
- data/README.rdoc +17 -0
- data/lib/torque/postgresql/adapter/database_statements.rb +61 -7
- data/lib/torque/postgresql/adapter/schema_creation.rb +3 -9
- data/lib/torque/postgresql/adapter/schema_dumper.rb +39 -7
- data/lib/torque/postgresql/adapter/schema_statements.rb +40 -0
- data/lib/torque/postgresql/adapter.rb +2 -2
- data/lib/torque/postgresql/associations/preloader/loader_query.rb +1 -1
- data/lib/torque/postgresql/auxiliary_statement/recursive.rb +149 -0
- data/lib/torque/postgresql/auxiliary_statement/settings.rb +74 -22
- data/lib/torque/postgresql/auxiliary_statement.rb +39 -40
- data/lib/torque/postgresql/base.rb +29 -25
- data/lib/torque/postgresql/config.rb +17 -0
- data/lib/torque/postgresql/inheritance.rb +3 -1
- data/lib/torque/postgresql/migration/command_recorder.rb +8 -8
- data/lib/torque/postgresql/railtie.rb +5 -1
- data/lib/torque/postgresql/relation/auxiliary_statement.rb +28 -15
- data/lib/torque/postgresql/schema_cache.rb +6 -1
- data/lib/torque/postgresql/table_name.rb +41 -0
- data/lib/torque/postgresql/version.rb +1 -1
- data/lib/torque/postgresql.rb +2 -1
- data/spec/models/category.rb +2 -0
- data/spec/models/internal/user.rb +5 -0
- data/spec/schema.rb +16 -0
- data/spec/spec_helper.rb +2 -1
- data/spec/tests/auxiliary_statement_spec.rb +374 -35
- data/spec/tests/enum_set_spec.rb +7 -6
- data/spec/tests/schema_spec.rb +92 -0
- data/spec/tests/table_inheritance_spec.rb +11 -15
- metadata +17 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c677b1f2b2cdf150a4e8e9a817cd7a9a6e46680a353c3125aeb7ab65e1b85348
+  data.tar.gz: 72a25a13491d9349a813484be40c43cacc767355060ba16375da7b7f41739249
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1b6bdeeb8ca02a6027a79fc0d896716603792c1d4cee95131995ef944075f13b983a583a22eb4e3b6b590b8430b7b4eb4469522cf76b47daa30b8c2dd35a7318
+  data.tar.gz: eca4f269833a0823442f247cb065f14fe412c85063e5df8e751a20a366fd296f02db3ad2720b02d7a4169c73b38c1bbfe29160f292671a8a5cf0a23b4ffa9261
data/README.rdoc
CHANGED
@@ -128,6 +128,23 @@ reconfigured on the model, and then can be used during querying process.
 
   {Learn more}[link:classes/Torque/PostgreSQL/AuxiliaryStatement.html]
 
+* Multiple Schemas
+
+  Allows models and modules to have a schema associated with them, so that
+  developers can better organize their tables into schemas and build features in
+  a way that the database can better represent how they are separated.
+
+    create_schema "internal", force: :cascade
+
+    module Internal
+      class User < ActiveRecord::Base
+        self.schema = 'internal'
+      end
+    end
+
+    Internal::User.all
+
+  {Learn more}[link:classes/Torque/PostgreSQL/Adapter/DatabaseStatements.html]
 
 == Download and installation
 
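With a model arranged like the README example above, queries run against the schema-qualified table. A rough sketch of the effect (the exact quoting is an assumption; the general shape follows from the schema support added in the adapter files below):

    Internal::User.all.to_sql
    # => SELECT "internal"."users".* FROM "internal"."users"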
data/lib/torque/postgresql/adapter/database_statements.rb
CHANGED

@@ -12,6 +12,26 @@ module Torque
           @_dump_mode = !!!@_dump_mode
         end
 
+        # List of schemas blocked by the application in the current connection
+        def schemas_blacklist
+          @schemas_blacklist ||= Torque::PostgreSQL.config.schemas.blacklist +
+            (@config.dig(:schemas, 'blacklist') || [])
+        end
+
+        # List of schemas used by the application in the current connection
+        def schemas_whitelist
+          @schemas_whitelist ||= Torque::PostgreSQL.config.schemas.whitelist +
+            (@config.dig(:schemas, 'whitelist') || [])
+        end
+
+        # A list of schemas on the search path sanitized
+        def schemas_search_path_sanitized
+          @schemas_search_path_sanitized ||= begin
+            db_user = @config[:username] || ENV['USER'] || ENV['USERNAME']
+            schema_search_path.split(',').map { |item| item.strip.sub('"$user"', db_user) }
+          end
+        end
+
         # Check if a given type is valid.
         def valid_type?(type)
           super || extended_types.include?(type)
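The two lists are built from the global gem config plus the per-connection config (the `@config.dig(:schemas, ...)` call implies a `schemas:` hash in the database configuration). A minimal configuration sketch, assuming an initializer; the `schemas.whitelist`/`schemas.blacklist` accessors come from the code above, the file name and values are illustrative:

    # config/initializers/torque_postgresql.rb
    Torque::PostgreSQL.config.schemas.whitelist += %w[internal]
    Torque::PostgreSQL.config.schemas.blacklist += %w[pg_temp_%]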
@@ -22,6 +42,17 @@ module Torque
           EXTENDED_DATABASE_TYPES
         end
 
+        # Checks if a given schema exists in the database. If +filtered+ is
+        # given as false, then it will check regardless of whitelist and
+        # blacklist
+        def schema_exists?(name, filtered: true)
+          return user_defined_schemas.include?(name.to_s) if filtered
+
+          query_value(<<-SQL) == 1
+            SELECT 1 FROM pg_catalog.pg_namespace WHERE nspname = '#{name}'
+          SQL
+        end
+
         # Returns true if type exists.
         def type_exists?(name)
           user_defined_types.key? name.to_s
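Usage sketch on a raw connection (the method and its +filtered+ option come from the code above; the schema names are illustrative):

    conn = ActiveRecord::Base.connection
    conn.schema_exists?('internal')                  # only user-defined (filtered) schemas
    conn.schema_exists?('pg_toast', filtered: false) # checks pg_namespace directly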
@@ -124,18 +155,41 @@ module Torque
         # Get the list of inherited tables associated with their parent tables
         def inherited_tables
           tables = query(<<-SQL, 'SCHEMA')
-            SELECT
-
+            SELECT inhrelid::regclass AS table_name,
+                   inhparent::regclass AS inheritances
              FROM pg_inherits
              JOIN pg_class parent ON pg_inherits.inhparent = parent.oid
              JOIN pg_class child ON pg_inherits.inhrelid = child.oid
-
-            ORDER BY pg_inherits.inhrelid
+            ORDER BY inhrelid
           SQL
 
-          tables.
-          [
-          end
+          tables.each_with_object({}) do |(child, parent), result|
+            (result[child] ||= []) << parent
+          end
+        end
+
+        # Get the list of schemas that were created by the user
+        def user_defined_schemas
+          query_values(user_defined_schemas_sql, 'SCHEMA')
+        end
+
+        # Build the query for allowed schemas
+        def user_defined_schemas_sql
+          conditions = []
+          conditions << <<-SQL if schemas_blacklist.any?
+            nspname NOT LIKE ANY (ARRAY['#{schemas_blacklist.join("', '")}'])
+          SQL
+
+          conditions << <<-SQL if schemas_whitelist.any?
+            nspname LIKE ANY (ARRAY['#{schemas_whitelist.join("', '")}'])
+          SQL
+
+          <<-SQL.squish
+            SELECT nspname
+              FROM pg_catalog.pg_namespace
+             WHERE 1=1 AND #{conditions.join(' AND ')}
+             ORDER BY oid
+          SQL
         end
 
         # Get the list of columns, and their definition, but only from the
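With a whitelist of, say, public and internal, the builder above produces roughly the following statement (squished to one line at runtime), and the helper returns the matching schema names; values are illustrative:

    # SELECT nspname FROM pg_catalog.pg_namespace
    #  WHERE 1=1 AND nspname LIKE ANY (ARRAY['public', 'internal'])
    #  ORDER BY oid
    ActiveRecord::Base.connection.user_defined_schemas
    # => ["public", "internal"]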
data/lib/torque/postgresql/adapter/schema_creation.rb
CHANGED

@@ -13,21 +13,15 @@ module Torque
           statements << accept(o.primary_keys) if o.primary_keys
 
           if supports_indexes_in_create?
-            statements.concat(o.indexes.map
-              index_in_create(o.name, column_name, options)
-            end)
+            statements.concat(o.indexes.map { |c, o| index_in_create(o.name, c, o) })
           end
 
           if supports_foreign_keys?
-            statements.concat(o.foreign_keys.map
-              foreign_key_in_create(o.name, to_table, options)
-            end)
+            statements.concat(o.foreign_keys.map { |fk| accept fk })
           end
 
           if respond_to?(:supports_check_constraints?) && supports_check_constraints?
-            statements.concat(o.check_constraints.map
-              check_constraint_in_create(o.name, expression, options)
-            end)
+            statements.concat(o.check_constraints.map { |chk| accept chk })
           end
 
           create_sql << "(#{statements.join(', ')})" \
data/lib/torque/postgresql/adapter/schema_dumper.rb
CHANGED

@@ -12,6 +12,11 @@ module Torque
           stream
         end
 
+        def extensions(stream) # :nodoc:
+          super
+          user_defined_schemas(stream)
+        end
+
         # Translate +:enum_set+ into +:enum+
         def schema_type(column)
           column.type == :enum_set ? :enum : super
@@ -20,8 +25,23 @@ module Torque
         private
 
           def tables(stream) # :nodoc:
+            around_tables(stream) { dump_tables(stream) }
+          end
+
+          def around_tables(stream)
+            functions(stream) if fx_functions_position == :beginning
+
+            yield
+
+            functions(stream) if fx_functions_position == :end
+            triggers(stream) if defined?(::Fx::SchemaDumper::Trigger)
+          end
+
+          def dump_tables(stream)
             inherited_tables = @connection.inherited_tables
-            sorted_tables = @connection.tables
+            sorted_tables = (@connection.tables - @connection.views).sort_by do |table_name|
+              table_name.split(/(?:public)?\./).reverse
+            end
 
             stream.puts " # These are the common tables"
             (sorted_tables - inherited_tables.keys).each do |table_name|
@@ -38,7 +58,7 @@ module Torque
 
             # Add the inherits setting
             sub_stream.rewind
-            inherits.map!(
+            inherits.map! { |parent| parent.to_s.sub(/\Apublic\./, '') }
             inherits = inherits.first if inherits.size === 1
             inherits = ", inherits: #{inherits.inspect} do |t|"
             table_dump = sub_stream.read.gsub(/ do \|t\|$/, inherits)
@@ -55,13 +75,25 @@ module Torque
               foreign_keys(tbl, stream) unless ignored?(tbl)
             end
           end
+          end
+
+          # Make sure to remove the schema from the table name
+          def remove_prefix_and_suffix(table)
+            super(table.sub(/\A[a-z0-9_]*\./, ''))
+          end
 
-
-
+          # Dump user defined schemas
+          def user_defined_schemas(stream)
+            return if (list = (@connection.user_defined_schemas - ['public'])).empty?
 
-            #
-
-
+            stream.puts " # Custom schemas defined in this database."
+            list.each { |name| stream.puts " create_schema \"#{name}\", force: :cascade" }
+            stream.puts
+          end
+
+          def fx_functions_position
+            return unless defined?(::Fx::SchemaDumper::Function)
+            Fx.configuration.dump_functions_at_beginning_of_schema ? :beginning : :end
           end
         end
 
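Taken together, the dumper additions emit the custom schemas right after the extensions in db/schema.rb. A rough sketch of the resulting dump, assuming one custom schema (version number, table, and columns are illustrative):

    ActiveRecord::Schema.define(version: 2021_01_01_000000) do
      enable_extension "plpgsql"

      # Custom schemas defined in this database.
      create_schema "internal", force: :cascade

      # These are the common tables
      create_table "users", force: :cascade do |t|
        t.string "name"
      end
    end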
data/lib/torque/postgresql/adapter/schema_statements.rb
CHANGED

@@ -7,6 +7,21 @@ module Torque
 
         TableDefinition = ActiveRecord::ConnectionAdapters::PostgreSQL::TableDefinition
 
+        # Create a new schema
+        def create_schema(name, options = {})
+          drop_schema(name, options) if options[:force]
+
+          check = 'IF NOT EXISTS' if options.fetch(:check, true)
+          execute("CREATE SCHEMA #{check} #{quote_schema_name(name.to_s)}")
+        end
+
+        # Drop an existing schema
+        def drop_schema(name, options = {})
+          force = options.fetch(:force, '').upcase
+          check = 'IF EXISTS' if options.fetch(:check, true)
+          execute("DROP SCHEMA #{check} #{quote_schema_name(name.to_s)} #{force}")
+        end
+
         # Drops a type.
         def drop_type(name, options = {})
           force = options.fetch(:force, '').upcase
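A minimal migration sketch using the new statements; create_schema, drop_schema, and their :force/:check options come from the code above, while the schema name and Rails version are illustrative:

    class CreateInternalSchema < ActiveRecord::Migration[6.0]
      def up
        create_schema :internal                 # CREATE SCHEMA IF NOT EXISTS "internal"
      end

      def down
        drop_schema :internal, force: :cascade  # DROP SCHEMA IF EXISTS "internal" CASCADE
      end
    end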
@@ -64,12 +79,37 @@ module Torque
 
         # Rewrite the method that creates tables to easily accept extra options
         def create_table(table_name, **options, &block)
+          table_name = "#{options[:schema]}.#{table_name}" if options[:schema].present?
+
           options[:id] = false if options[:inherits].present? &&
             options[:primary_key].blank? && options[:id].blank?
 
           super table_name, **options, &block
         end
 
+        # Add the schema option when extracting table options
+        def table_options(table_name)
+          parts = table_name.split('.').reverse
+          return super unless parts.size == 2 && parts[1] != 'public'
+
+          (super || {}).merge(schema: parts[1])
+        end
+
+        # When dumping the schema we need to add all schemas, not only those
+        # active for the current +schema_search_path+
+        def quoted_scope(name = nil, type: nil)
+          return super unless name.nil?
+
+          super.merge(schema: "ANY ('{#{user_defined_schemas.join(',')}}')")
+        end
+
+        # Fix the query to include the schema on tables names when dumping
+        def data_source_sql(name = nil, type: nil)
+          return super unless name.nil?
+
+          super.sub('SELECT c.relname FROM', "SELECT n.nspname || '.' || c.relname FROM")
+        end
+
         private
 
           def quote_enum_values(name, values, options)
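In a migration this means a table can be placed into a schema directly. A sketch assuming the :schema option added above (table and columns are illustrative):

    create_table :users, schema: :internal do |t|
      t.string :name
      t.timestamps
    end
    # creates "internal"."users", matching the self.schema = 'internal'
    # model shown in the README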
data/lib/torque/postgresql/adapter.rb
CHANGED

@@ -31,9 +31,9 @@ module Torque
        )
       end
 
-      # Add `inherits` to the list of extracted table options
+      # Add `inherits` and `schema` to the list of extracted table options
       def extract_table_options!(options)
-        super.merge(options.extract!(:inherits))
+        super.merge(options.extract!(:inherits, :schema))
       end
 
       # Allow filtered bulk insert by adding the where clause. This method is
data/lib/torque/postgresql/auxiliary_statement/recursive.rb
ADDED

@@ -0,0 +1,149 @@
+# frozen_string_literal: true
+
+module Torque
+  module PostgreSQL
+    class AuxiliaryStatement
+      class Recursive < AuxiliaryStatement
+        # Setup any additional option in the recursive mode
+        def initialize(*, **options)
+          super
+
+          @connect = options[:connect]&.to_a&.first
+          @union_all = options[:union_all]
+          @sub_query = options[:sub_query]
+
+          if options.key?(:with_depth)
+            @depth = options[:with_depth].values_at(:name, :start, :as)
+            @depth[0] ||= 'depth'
+          end
+
+          if options.key?(:with_path)
+            @path = options[:with_path].values_at(:name, :source, :as)
+            @path[0] ||= 'path'
+          end
+        end
+
+        private
+
+          # Build the string or arel query
+          def build_query(base)
+            # Expose columns and get the list of the ones for select
+            columns = expose_columns(base, @query.try(:arel_table))
+            sub_columns = columns.dup
+            type = @union_all.present? ? 'all' : ''
+
+            # Build any extra columns that are dynamic and from the recursion
+            extra_columns(base, columns, sub_columns)
+
+            # Prepare the query depending on its type
+            if @query.is_a?(String) && @sub_query.is_a?(String)
+              args = @args.each_with_object({}) { |h, (k, v)| h[k] = base.connection.quote(v) }
+              ::Arel.sql("(#{@query} UNION #{type.upcase} #{@sub_query})" % args)
+            elsif relation_query?(@query)
+              @query = @query.where(@where) if @where.present?
+              @bound_attributes.concat(@query.send(:bound_attributes))
+
+              if relation_query?(@sub_query)
+                @bound_attributes.concat(@sub_query.send(:bound_attributes))
+
+                sub_query = @sub_query.select(*sub_columns).arel
+                sub_query.from([@sub_query.arel_table, table])
+              else
+                sub_query = ::Arel.sql(@sub_query)
+              end
+
+              @query.select(*columns).arel.union(type, sub_query)
+            else
+              raise ArgumentError, <<-MSG.squish
+                Only String and ActiveRecord::Base objects are accepted as query and sub query
+                objects, #{@query.class.name} given for #{self.class.name}.
+              MSG
+            end
+          end
+
+          # Setup the statement using the class configuration
+          def prepare(base, settings)
+            super
+
+            prepare_sub_query(base, settings)
+          end
+
+          # Make sure that both parts of the union are ready
+          def prepare_sub_query(base, settings)
+            @union_all = settings.union_all if @union_all.nil?
+            @sub_query ||= settings.sub_query
+            @depth ||= settings.depth
+            @path ||= settings.path
+
+            # Collect the connection
+            @connect ||= settings.connect || begin
+              key = base.primary_key
+              [key.to_sym, :"parent_#{key}"] unless key.nil?
+            end
+
+            raise ArgumentError, <<-MSG.squish if @sub_query.nil? && @query.is_a?(String)
+              Unable to generate sub query from a string query. Please provide a `sub_query`
+              property on the "#{table_name}" settings.
+            MSG
+
+            if @sub_query.nil?
+              raise ArgumentError, <<-MSG.squish if @connect.blank?
+                Unable to generate sub query without setting up a proper way to connect it
+                with the main query. Please provide a `connect` property on the "#{table_name}"
+                settings.
+              MSG
+
+              left, right = @connect.map(&:to_s)
+              condition = @query.arel_table[right].eq(table[left])
+
+              if @query.where_values_hash.key?(right)
+                @sub_query = @query.unscope(where: right.to_sym).where(condition)
+              else
+                @sub_query = @query.where(condition)
+                @query = @query.where(right => nil)
+              end
+            elsif @sub_query.respond_to?(:call)
+              # Call a proc to get the real sub query
+              call_args = @sub_query.try(:arity) === 0 ? [] : [OpenStruct.new(@args)]
+              @sub_query = @sub_query.call(*call_args)
+            end
+          end
+
+          # Add depth and path if they were defined in settings
+          def extra_columns(base, columns, sub_columns)
+            return if @query.is_a?(String) || @sub_query.is_a?(String)
+
+            # Add the connect attribute to the query
+            if defined?(@connect)
+              columns.unshift(@query.arel_table[@connect[0]])
+              sub_columns.unshift(@sub_query.arel_table[@connect[0]])
+            end
+
+            # Build a column to represent the depth of the recursion
+            if @depth.present?
+              name, start, as = @depth
+              col = table[name]
+              base.select_extra_values += [col.as(as)] unless as.nil?
+
+              columns << ::Arel.sql(start.to_s).as(name)
+              sub_columns << (col + ::Arel.sql('1')).as(name)
+            end
+
+            # Build a column to represent the path of the record access
+            if @path.present?
+              name, source, as = @path
+              source = @query.arel_table[source || @connect[0]]
+
+              col = table[name]
+              base.select_extra_values += [col.as(as)] unless as.nil?
+              parts = [col, source.cast(:varchar)]
+
+              columns << ::Arel.array([source]).cast(:varchar, true).as(name)
+              sub_columns << ::Arel::Nodes::NamedFunction.new('array_append', parts).as(name)
+            end
+          end
+
+      end
+    end
+  end
+end
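A hedged sketch of how this class could be wired up from a model: the settings calls (query, connect, union_all!, with_depth) and the `.with` querying API come from this gem, while the `recursive_auxiliary_statement` macro name and the Category model are assumptions, since the base.rb changes are not shown in this excerpt:

    class Category < ActiveRecord::Base
      recursive_auxiliary_statement :subcategories do |cte|
        cte.query Category.all            # non-recursive term of the UNION
        cte.connect id: :parent_id        # join child.parent_id to parent.id
        cte.union_all!                    # UNION ALL instead of UNION
        cte.with_depth 'depth', start: 0
      end
    end

    Category.with(:subcategories)         # builds a WITH RECURSIVE ... query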
data/lib/torque/postgresql/auxiliary_statement/settings.rb
CHANGED

@@ -4,9 +4,9 @@ module Torque
   module PostgreSQL
     class AuxiliaryStatement
       class Settings < Collector.new(:attributes, :join, :join_type, :query, :requires,
-        :polymorphic, :through)
+        :polymorphic, :through, :union_all, :connect)
 
-        attr_reader :base, :source
+        attr_reader :base, :source, :depth, :path
         alias_method :select, :attributes
         alias_method :cte, :source
 
@@ -14,9 +14,10 @@ module Torque
         delegate :table, :table_name, to: :@source
         delegate :sql, to: ::Arel
 
-        def initialize(base, source)
+        def initialize(base, source, recursive = false)
           @base = base
           @source = source
+          @recursive = recursive
         end
 
         def base_name
@@ -27,6 +28,38 @@ module Torque
           @base.arel_table
         end
 
+        def recursive?
+          @recursive
+        end
+
+        def depth?
+          defined?(@depth)
+        end
+
+        def path?
+          defined?(@path)
+        end
+
+        # Add an attribute to the result showing the depth of each iteration
+        def with_depth(name = 'depth', start: 0, as: nil)
+          @depth = [name.to_s, start, as&.to_s] if recursive?
+        end
+
+        # Add an attribute to the result showing the path of each record
+        def with_path(name = 'path', source: nil, as: nil)
+          @path = [name.to_s, source&.to_s, as&.to_s] if recursive?
+        end
+
+        # Set recursive operation to use union all
+        def union_all!
+          @union_all = true if recursive?
+        end
+
+        # Add both depth and path to the result
+        def with_depth_and_path
+          with_depth && with_path
+        end
+
         # Get the arel version of the table set on the query
        def query_table
           raise StandardError, 'The query is not defined yet' if query.nil?
@@ -41,36 +74,55 @@ module Torque
 
         alias column col
 
-        # There are
+        # There are three ways of setting the query:
         # - A simple relation based on a Model
         # - A Arel-based select manager
-        # - A string or a proc
+        # - A string or a proc
         def query(value = nil, command = nil)
           return @query if value.nil?
-          return @query = value if relation_query?(value)
 
-
-
-          @query_table = value.source.left.name
-          return
-          end
+          @query = sanitize_query(value, command)
+        end
 
-
+        # Same as query, but for the second part of the union for recursive cte
+        def sub_query(value = nil, command = nil)
+          return unless recursive?
+          return @sub_query if value.nil?
 
-
-
-
-
+          @sub_query = sanitize_query(value, command)
+        end
+
+        # Assume `parent_` as the other part if provided a Symbol or String
+        def connect(value = nil)
+          return @connect if value.nil?
 
-
-
-
-          MSG
+          value = [value.to_sym, :"parent_#{value}"] \
+            if value.is_a?(String) || value.is_a?(Symbol)
+          value = value.to_a.first if value.is_a?(Hash)
 
-          @
-          @query_table = ::Arel::Table.new(value)
+          @connect = value
         end
 
+        alias connect= connect
+
+        private
+
+          # Get the query and table from the params
+          def sanitize_query(value, command = nil)
+            return value if relation_query?(value)
+            return value if value.is_a?(::Arel::SelectManager)
+
+            command = value if command.nil? # For compatibility purposes
+            valid_type = command.respond_to?(:call) || command.is_a?(String)
+
+            raise ArgumentError, <<-MSG.squish unless valid_type
+              Only relation, string and proc are valid object types for query,
+              #{command.inspect} given.
+            MSG
+
+            command
+          end
+
       end
     end
   end
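A short sketch of the Settings DSL as extended above, where `cte` stands for the Settings instance yielded inside a statement definition block (the block macro itself lives outside this excerpt, and the model and values are illustrative):

    cte.query Category.all                    # relation, Arel select manager, string or proc
    cte.sub_query -> { Category.where.not(parent_id: nil) }
    cte.connect :id                           # expands to [:id, :parent_id]
    cte.union_all!
    cte.with_depth 'depth', start: 0, as: :depth
    cte.with_path 'path', source: :name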