prato 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +5 -0
- data/LICENSE.txt +21 -0
- data/README.md +938 -0
- data/lib/prato/configuration.rb +99 -0
- data/lib/prato/internal/active_record_version.rb +24 -0
- data/lib/prato/internal/join_helper.rb +48 -0
- data/lib/prato/internal/join_helper_legacy.rb +171 -0
- data/lib/prato/internal/lazy_loader_cache.rb +25 -0
- data/lib/prato/internal/pipeline/filtering.rb +277 -0
- data/lib/prato/internal/pipeline/pagination.rb +30 -0
- data/lib/prato/internal/pipeline/serializer.rb +87 -0
- data/lib/prato/internal/pipeline/sorting.rb +78 -0
- data/lib/prato/internal/query_executor.rb +105 -0
- data/lib/prato/internal/query_state.rb +90 -0
- data/lib/prato/internal/specification.rb +101 -0
- data/lib/prato/internal/specification_builder.rb +361 -0
- data/lib/prato/internal/sql_support.rb +118 -0
- data/lib/prato/query/and_filter.rb +13 -0
- data/lib/prato/query/default_parser.rb +148 -0
- data/lib/prato/query/field_resolver.rb +23 -0
- data/lib/prato/query/filter.rb +15 -0
- data/lib/prato/query/or_filter.rb +13 -0
- data/lib/prato/query/parameters.rb +17 -0
- data/lib/prato/query/sort.rb +14 -0
- data/lib/prato/table.rb +39 -0
- data/lib/prato/table_builder.rb +40 -0
- data/lib/prato/types/aggregate_column.rb +93 -0
- data/lib/prato/types/association_column.rb +37 -0
- data/lib/prato/types/direct_column.rb +27 -0
- data/lib/prato/types/expression_column.rb +38 -0
- data/lib/prato/types/ruby_column.rb +31 -0
- data/lib/prato/version.rb +5 -0
- data/lib/prato.rb +66 -0
- metadata +96 -0
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Prato
  module Internal
    module Pipeline
      # Turns a query state into an array of result hashes, one per row,
      # writing each field's value at the output path configured in the
      # specification.
      module Serializer
        extend self

        # Serialize the dataset held by +query_state+.
        #
        # query_state - the Internal::QueryState to read rows from.
        # spec        - the Internal::Specification describing columns.
        # raw_fields  - explicit list of fields to emit, or nil to use
        #               the spec's visible fields.
        #
        # Uses a pluck-based SQL-only fast path when the dataset has not
        # been materialized yet and none of the requested fields require
        # Ruby-side computation; otherwise materializes records and
        # extracts values in Ruby.
        def serialize_query(query_state, spec, raw_fields)
          requested = raw_fields || spec.visible_fields

          if query_state.unmaterialized? && spec.sql_only?(requested)
            optimized_serialization(query_state, spec, requested)
          else
            normal_serialization(query_state, spec, requested)
          end
        end

        private

        # Fast path: render every field as a SQL node and pluck the values
        # straight from the database, skipping model instantiation.
        def optimized_serialization(query_state, spec, fields)
          columns = spec.columns
          scope = query_state.dataset

          # Ensure LEFT JOINs for association-backed fields exist before
          # any column builds its SQL node against the scope.
          paths = fields.each_with_object([]) do |field, acc|
            col = columns[field]
            acc << col.association_path if col.is_a?(Types::AssociationColumn)
          end
          scope = Internal::JoinHelper.ensure_left_joins(scope, paths.uniq)

          select_nodes = fields.map do |field|
            col = columns[field]

            case col
            when Types::DirectColumn, Types::AssociationColumn, Types::AggregateColumn, Types::ExpressionColumn
              col.sql_node_for(scope)
            else
              raise "Assertion error: Trying to serialize with unknown column type: #{col.class}"
            end
          end

          scope.pluck(*select_nodes).map do |row|
            # With a single select, pluck yields scalars rather than arrays.
            cells = Array(row)
            result = {}
            fields.each_with_index do |field, idx|
              col = columns[field]
              value = cells[idx]
              value = col.format.call(value) if col.format
              assign_value(result, spec, field, value)
            end
            result
          end
        end

        # Slow path: materialize record objects and pull each field's
        # value out in Ruby, applying per-column formatting when present.
        def normal_serialization(query_state, spec, fields)
          records, ruby_loaded_data = query_state.materialized_dataset(spec)
          columns = spec.columns

          records.map do |record|
            result = {}
            fields.each do |field|
              col = columns[field]
              value =
                if col.is_a?(Types::RubyColumn)
                  # Ruby columns receive the lazily loaded auxiliary data
                  # and are responsible for their own formatting.
                  col.extract_value(record, ruby_loaded_data)
                else
                  raw = col.extract_value(record, nil)
                  col.format ? col.format.call(raw) : raw
                end
              assign_value(result, spec, field, value)
            end
            result
          end
        end

        # Store +value+ in +hash+ at the (possibly nested) output path the
        # spec maps +field+ to, creating intermediate hashes on demand.
        def assign_value(hash, spec, field, value)
          *parents, leaf = spec.field_mapping(field)
          target = parents.reduce(hash) { |node, key| node[key] ||= {} }
          target[leaf] = value
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Prato
  module Internal
    module Pipeline
      # Applies sort parameters to a query state, either as SQL ORDER BY
      # clauses on the relation or as an in-memory sort of materialized
      # records.
      module Sorting
        extend self

        # Sort the dataset according to +raw_sorts+ (a single sort or an
        # array of them); returns the state untouched when nil.
        #
        # Falls back to a Ruby-side sort when any sort key is a RubyColumn
        # (its value only exists after materialization) or when the
        # dataset has already been materialized into an array.
        def sort_query(query_state, spec, raw_sorts)
          return query_state if raw_sorts.nil?

          sorts = Array(raw_sorts)
          needs_ruby = sorts.any? { |sort| spec.columns[sort.field].is_a?(Types::RubyColumn) }

          if needs_ruby || !query_state.unmaterialized?
            apply_ruby_sorts(query_state, spec, sorts)
          else
            apply_sql_sorts(query_state, spec, sorts)
          end
        end

        private

        # Translate each sort into an Arel ordering on the relation,
        # adding LEFT OUTER joins so sorting on an association does not
        # filter out rows lacking the associated record.
        def apply_sql_sorts(query_state, spec, sorts)
          relation = sorts.reduce(query_state.dataset) do |scope, sort|
            column = spec.columns[sort.field]
            joined = Internal::JoinHelper.ensure_join(scope, column, left_outer: true)
            node = column.sql_node_for(joined)
            joined.order(build_order_node(node, sort.is_desc))
          end

          query_state.with_dataset(relation)
        end

        # Materialize the records and sort them in memory, comparing the
        # extracted values of each sort field in turn until one differs.
        def apply_ruby_sorts(query_state, spec, sorts)
          records, ruby_data = query_state.materialized_dataset(spec)

          sorted = records.sort do |left, right|
            comparison = 0
            sorts.each do |sort|
              # Earlier sort already decided the order; stop comparing.
              break unless comparison.zero?

              column = spec.columns[sort.field]
              result = safe_compare(column.extract_value(left, ruby_data),
                                    column.extract_value(right, ruby_data))
              comparison = sort.is_desc ? -result : result
            end
            comparison
          end

          query_state.with_dataset(sorted)
        end

        # nil-safe comparison: nils sort after non-nil values; two nils
        # compare equal.
        def safe_compare(a, b)
          return 0 if a.nil? && b.nil?
          return 1 if a.nil?
          return -1 if b.nil?

          a <=> b
        end

        # Newer Arel exposes asc/desc helpers on nodes; on older
        # ActiveRecord build the ordering node classes directly.
        if ActiveRecordVersion.supports_arel_desc?
          def build_order_node(node, is_desc)
            is_desc ? node.desc : node.asc
          end
        else
          def build_order_node(node, is_desc)
            is_desc ? Arel::Nodes::Descending.new(node) : Arel::Nodes::Ascending.new(node)
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Prato
  module Internal
    # Entry point that drives the full query pipeline: parameter
    # resolution, validation, filtering, sorting, pagination and
    # serialization.
    module QueryExecutor
      extend self

      # Execute the pipeline over +scope+ and return serialized results.
      #
      # scope      - base relation/array to query.
      # spec       - Internal::Specification describing the table.
      # raw_params - Query::Parameters, raw parseable input, or nil.
      # paginated  - when true, returns { entries:, totalCount: } and
      #              applies page/per_page; otherwise returns the plain
      #              array of serialized rows.
      #
      # Invalid parameters either raise ArgumentError or produce an empty
      # result, depending on config.on_invalid_input.
      def execute(scope, spec, raw_params:, paginated: true)
        config = spec.config
        params = resolve_parameters(raw_params, config, spec)

        sorted_query = build_sorted_query(scope, spec, params)
        return invalid_input_result(config, paginated) if sorted_query.nil?

        if paginated
          paginated_query = Pipeline::Pagination.paginate_query(sorted_query, config, params&.page, params&.per_page)
          data = Pipeline::Serializer.serialize_query(paginated_query, spec, params&.fields)

          # Count runs on the pre-pagination (but post-filter) query.
          { entries: data, totalCount: total_count(sorted_query) }
        else
          Pipeline::Serializer.serialize_query(sorted_query, spec, params&.fields)
        end
      end

      # Execute the pipeline and yield serialized results in batches of
      # +batch_size+. Chooses a batching strategy based on whether the
      # dataset is already in memory and whether a sort order must be
      # preserved.
      def execute_in_batches(scope, spec, raw_params:, batch_size:)
        config = spec.config
        params = resolve_parameters(raw_params, config, spec)

        sorted_query = build_sorted_query(scope, spec, params)
        if sorted_query.nil?
          raise ArgumentError, "invalid query parameters" if config.on_invalid_input == :raise

          return
        end

        has_sort = !Array(params&.sorts).empty?

        if !sorted_query.unmaterialized?
          # Already an in-memory array: slice it directly.
          sorted_query.dataset.each_slice(batch_size) do |slice|
            batch_state = sorted_query.with_dataset(slice)
            yield Pipeline::Serializer.serialize_query(batch_state, spec, params&.fields)
          end
        elsif has_sort
          # in_batches does not preserve ORDER BY, so page manually with
          # OFFSET/LIMIT to keep the requested sort order.
          offset = 0
          loop do
            relation = sorted_query.dataset.offset(offset).limit(batch_size)
            batch_state = sorted_query.with_dataset(relation)
            serialized = Pipeline::Serializer.serialize_query(batch_state, spec, params&.fields)
            break if serialized.empty?

            yield serialized
            # A short batch means the relation is exhausted.
            break if serialized.size < batch_size

            offset += batch_size
          end
        else
          sorted_query.dataset.in_batches(of: batch_size) do |relation|
            batch_state = sorted_query.with_dataset(relation)
            yield Pipeline::Serializer.serialize_query(batch_state, spec, params&.fields)
          end
        end
      end

      private

      # Shared front half of the pipeline: validate the referenced fields,
      # then apply filtering and sorting. Returns nil when the parameters
      # reference unknown or disallowed fields. (Extracted so execute and
      # execute_in_batches no longer duplicate these steps.)
      def build_sorted_query(scope, spec, params)
        materialization_fields = spec.validate_and_extract_materialization_fields(params)
        return nil if materialization_fields.nil?

        base_query_state = QueryState.create(scope, materialization_fields)
        filtered_query = Pipeline::Filtering.filter_query(base_query_state, spec, params&.filters)
        Pipeline::Sorting.sort_query(filtered_query, spec, params&.sorts)
      end

      # Convert raw caller input into Query::Parameters via the configured
      # parser; passes through nil and already-parsed parameters.
      def resolve_parameters(input, config, spec)
        return nil if input.nil?
        return input if input.is_a?(Query::Parameters)

        config.parameter_parser.parse_parameters(input, Prato::Query::FieldResolver.resolve_context(spec.field_lookup))
      end

      # Result returned (or error raised) when parameters are invalid.
      def invalid_input_result(configuration, paginated)
        raise ArgumentError, "invalid query parameters" if configuration.on_invalid_input == :raise

        paginated ? { entries: [], totalCount: 0 } : []
      end

      # Total row count for pagination metadata. Strips custom selects on
      # relations so aggregate/expression selects don't break COUNT.
      def total_count(query_state)
        if query_state.unmaterialized?
          query_state.dataset.except(:select).count
        else
          query_state.dataset.count
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Prato
  module Internal
    # Holds the dataset flowing through the pipeline. The dataset starts
    # as a relation-like scope and may be materialized into an Array of
    # records; pipeline steps derive new states via #with_dataset.
    class QueryState
      attr_reader :dataset

      # Build the initial state. The scope is dup'ed so pipeline steps
      # never mutate the caller's relation.
      def self.create(base_scope, materialization_fields)
        dataset = base_scope.dup

        new(dataset, nil, materialization_fields)
      end

      # Return a new state holding +dataset+, carrying over any
      # already-loaded Ruby data and the materialization field list.
      def with_dataset(dataset)
        self.class.new(dataset, @ruby_loaded_data, @materialization_fields)
      end

      # True while the dataset is still a relation (not yet loaded into
      # an Array of records).
      def unmaterialized?
        !dataset.is_a?(Array)
      end

      # Load the relation into records, attaching the selects and
      # association loading each materialization field needs, and set up
      # the lazy Ruby-loader cache. Returns [records, ruby_loaded_data].
      #
      # Memoized: @records was previously written but never read, so a
      # second call re-ran the full query; now repeated calls reuse the
      # first materialization.
      def materialized_dataset(spec)
        return [@dataset, @ruby_loaded_data] unless unmaterialized?
        return [@records, @ruby_loaded_data] if @records

        columns = spec.columns
        scope = dataset
        # Always select the base table's own columns, plus any
        # SQL-computed extras the requested fields contribute.
        selects = Set.new([Arel.sql("#{scope.model.table_name}.*")])
        association_load_values = []

        @materialization_fields.each do |field|
          column = columns[field]

          case column
          when Types::AggregateColumn, Types::ExpressionColumn
            selects << column.select_node
          when Types::AssociationColumn
            association_load_values << association_path_to_association_load(column.association_path)
          when Types::RubyColumn
            association_load_values << column.includes if column.includes

            # A Ruby column's loader may declare its own :includes.
            loader = spec.ruby_loaders&.[](column.loader)
            association_load_values << loader[:includes] if loader && loader[:includes]
          end
        end

        if association_load_values.any?
          scope = apply_association_loading(scope, association_load_values)
        end

        scope = scope.select(selects.to_a)
        records = scope.to_a

        ruby_loaded_data = nil
        if spec.ruby_loaders&.any?
          # Lazy cache: loader blocks only run when a column first asks.
          ruby_loaded_data = LazyLoaderCache.new(records)
          spec.ruby_loaders.each { |key, loader| ruby_loaded_data[key] = loader[:block] }
        end

        @records = records
        @ruby_loaded_data = ruby_loaded_data
        [records, ruby_loaded_data]
      end

      private

      # preload vs includes differs across ActiveRecord versions; pick the
      # implementation once at load time.
      if ActiveRecordVersion.legacy?
        def apply_association_loading(scope, association_load_values)
          scope.preload(*association_load_values)
        end
      else
        def apply_association_loading(scope, association_load_values)
          scope.includes(*association_load_values)
        end
      end

      # Convert a path like [:a, :b, :c] into {a: {b: :c}} for
      # includes/preload.
      def association_path_to_association_load(path)
        head, *tail = path
        return head if tail.empty?

        { head => association_path_to_association_load(tail) }
      end

      def initialize(dataset, ruby_loaded_data, materialization_fields)
        @dataset = dataset
        @ruby_loaded_data = ruby_loaded_data
        @materialization_fields = materialization_fields
      end
    end
  end
end
|
|
@@ -0,0 +1,101 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Prato
  module Internal
    # Describes a configured table: its columns, which fields may be
    # displayed/filtered/sorted, where each field's value lands in the
    # output hash, and the Ruby loaders available to RubyColumns.
    class Specification
      attr_reader :columns, :visible_fields, :ruby_loaders, :field_lookup, :config

      def initialize(columns:,
                     visible_fields:,
                     filterable_fields:,
                     sortable_fields:,
                     output_paths:,
                     field_lookup:,
                     ruby_loaders:,
                     config:)
        @columns = columns
        @visible_fields = visible_fields
        @filterable_fields = filterable_fields
        @sortable_fields = sortable_fields
        @output_paths = output_paths
        @field_lookup = field_lookup
        @ruby_loaders = ruby_loaders
        @config = config
      end

      # Validate +params+ against the allow-lists and return the unique
      # set of fields the query must materialize; nil when any referenced
      # field is unknown or not permitted. Nil params mean "everything
      # visible".
      def validate_and_extract_materialization_fields(params)
        return @visible_fields if params.nil?

        collected = []

        valid = collect_filter_fields(params.filters, collected) &&
                collect_sort_fields(params.sorts, collected) &&
                collect_display_fields(params.fields, collected)

        valid ? collected.uniq : nil
      end

      # Output path (array of keys) at which +field_name+'s value is
      # written into each serialized row.
      def field_mapping(field_name)
        @output_paths[field_name]
      end

      # True when none of +display_fields+ require Ruby-side computation,
      # i.e. the whole selection can be answered in SQL.
      def sql_only?(display_fields)
        display_fields.none? { |field| @columns[field].is_a?(Types::RubyColumn) }
      end

      private

      # Walk the (possibly nested) filter tree, recording every referenced
      # field; false when a field is not filterable, an operator is not
      # allowed, or a node is not a recognized filter type.
      def collect_filter_fields(filters, fields)
        return true if filters.nil?

        Array(filters).all? do |filter|
          case filter
          when Query::Filter
            next false unless @filterable_fields.include?(filter.field)
            next false unless valid_filter_operator?(filter)

            fields << filter.field
            true
          when Query::AndFilter, Query::OrFilter
            collect_filter_fields(filter.filters, fields)
          end
        end
      end

      # Record each sort field; false when any field is not sortable.
      def collect_sort_fields(sorts, fields)
        return true if sorts.nil?

        Array(sorts).all? do |sort|
          next false unless @sortable_fields.include?(sort.field)

          fields << sort.field
          true
        end
      end

      # Record requested display fields (defaulting to all visible fields
      # when the request names none); false when any is not visible.
      def collect_display_fields(display, fields)
        if display.nil?
          fields.concat(@visible_fields)
          return true
        end

        Array(display).all? do |field|
          next false unless @visible_fields.include?(field)

          fields << field
          true
        end
      end

      # A column may restrict its filter to an array of operators;
      # any non-array value accepts every operator.
      def valid_filter_operator?(filter)
        allowed = @columns[filter.field].filter
        !allowed.is_a?(Array) || allowed.include?(filter.operator)
      end
    end
  end
end
|