forest_admin_datasource_mongoid 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/LICENSE +674 -0
- data/Rakefile +12 -0
- data/forest_admin_datasource_mongoid.gemspec +38 -0
- data/lib/forest_admin_datasource_mongoid/collection.rb +135 -0
- data/lib/forest_admin_datasource_mongoid/datasource.rb +125 -0
- data/lib/forest_admin_datasource_mongoid/options_parser.rb +79 -0
- data/lib/forest_admin_datasource_mongoid/parser/column.rb +86 -0
- data/lib/forest_admin_datasource_mongoid/parser/relation.rb +18 -0
- data/lib/forest_admin_datasource_mongoid/parser/validation.rb +87 -0
- data/lib/forest_admin_datasource_mongoid/utils/add_null_values.rb +56 -0
- data/lib/forest_admin_datasource_mongoid/utils/helpers.rb +151 -0
- data/lib/forest_admin_datasource_mongoid/utils/mongoid_serializer.rb +38 -0
- data/lib/forest_admin_datasource_mongoid/utils/pipeline/condition_generator.rb +30 -0
- data/lib/forest_admin_datasource_mongoid/utils/pipeline/filter_generator.rb +218 -0
- data/lib/forest_admin_datasource_mongoid/utils/pipeline/group_generator.rb +86 -0
- data/lib/forest_admin_datasource_mongoid/utils/pipeline/lookup_generator.rb +97 -0
- data/lib/forest_admin_datasource_mongoid/utils/pipeline/projection_generator.rb +20 -0
- data/lib/forest_admin_datasource_mongoid/utils/pipeline/reparent_generator.rb +97 -0
- data/lib/forest_admin_datasource_mongoid/utils/pipeline/virtual_field_generator.rb +78 -0
- data/lib/forest_admin_datasource_mongoid/utils/schema/fields_generator.rb +87 -0
- data/lib/forest_admin_datasource_mongoid/utils/schema/mongoid_schema.rb +196 -0
- data/lib/forest_admin_datasource_mongoid/utils/schema/relation_generator.rb +51 -0
- data/lib/forest_admin_datasource_mongoid/utils/version_manager.rb +13 -0
- data/lib/forest_admin_datasource_mongoid/version.rb +3 -0
- data/lib/forest_admin_datasource_mongoid.rb +11 -0
- metadata +119 -0
data/lib/forest_admin_datasource_mongoid/utils/helpers.rb
@@ -0,0 +1,151 @@
module ForestAdminDatasourceMongoid
  module Utils
    module Helpers
      # Similar to projection.unnest
      # @example
      #   unnest(['firstname', 'book.title', 'book.author'], 'book') == ['title', 'author']
      def unnest(strings, prefix)
        strings.select { |field| field.start_with?("#{prefix}.") }.map { |field| field[(prefix.size + 1)..] }
      end

      def escape(str)
        str.tr('.', '_')
      end

      def recursive_set(target, path, value)
        index = path.index('.')
        if index.nil?
          target[path] = value
        else
          prefix = path[0, index]
          suffix = path[index + 1, path.length]
          target[prefix] ||= {}
          recursive_set(target[prefix], suffix, value)
        end
      end

      def recursive_delete(target, path)
        index = path.index('.')

        if index.nil?
          target.delete(path)
        else
          prefix = path[0..(index - 1)]
          suffix = path[(index + 1)..]

          if target.is_a?(Hash) && target.key?(prefix)
            recursive_delete(target[prefix], suffix)
            target.delete(prefix) if target[prefix].empty?
          end
        end
      end

      # not sure if this method is relevant for mongoid
      def replace_mongo_types(data)
        case data
        when BSON::ObjectId, BSON::Decimal128
          data.to_s
        when Date, Time
          data.iso8601
        when Array
          data.map { |item| replace_mongo_types(item) }
        when Hash
          data.transform_values { |value| replace_mongo_types(value) }
        else
          data
        end
      end

      # Unflatten patches and records
      def unflatten_record(record, as_fields, patch_mode: false)
        new_record = record.dup

        as_fields.each do |field|
          alias_field = field.gsub('.', '@@@')

          value = new_record[alias_field]

          next if value.nil?

          if patch_mode
            new_record[field] = value
          else
            recursive_set(new_record, field, value)
          end

          new_record.delete(alias_field)
        end

        new_record
      end

      def reformat_patch(patch)
        patch.each_with_object({}) do |(key, value), result|
          keys = key.split('.')
          last_key = keys.pop
          nested_hash = keys.reverse.inject({ last_key => value }) do |hash, k|
            { k => hash }
          end
          deep_merge(result, nested_hash)
        end
      end

      def deep_merge(target, source)
        source.each do |key, value|
          if target[key].is_a?(Hash) && value.is_a?(Hash)
            deep_merge(target[key], value)
          else
            target[key] ||= value
          end
        end
        target
      end

      # Compare two ids.
      # This is useful to ensure we perform array operations in the right order.
      #
      # @example
      #   compare_ids('a.20.a', 'a.1.b') => 1 (because 1 < 20)
      #   compare_ids('a.0.a', 'b.1.b') => -1 (because 'a' < 'b')
      def compare_ids(id_a, id_b)
        parts_a = id_a.split('.')
        parts_b = id_b.split('.')
        length = [parts_a.length, parts_b.length].min

        (0...length).each do |i|
          # if both parts are numbers, we compare them numerically
          result = if parts_a[i] =~ /^\d+$/ && parts_b[i] =~ /^\d+$/
                     parts_a[i].to_i <=> parts_b[i].to_i
                   else
                     # else, we compare as strings
                     parts_a[i] <=> parts_b[i]
                   end
          return result unless result.zero?
        end

        parts_a.length <=> parts_b.length
      end

      def split_id(id)
        dot_index = id.index('.')
        root_id = id[0...dot_index]
        path = id[(dot_index + 1)..]

        root_id = BSON::ObjectId.from_string(root_id) if BSON::ObjectId.legal?(root_id)

        [root_id, path]
      end

      def group_ids_by_path(ids)
        updates = Hash.new { |hash, key| hash[key] = [] }

        ids.each do |id|
          root_id, path = split_id(id)
          updates[path] << root_id
        end

        updates
      end
    end
  end
end
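
The following usage sketch is editorial (not part of the package) and assumes the gem is loaded; the record contents and field names are invented. It shows what unflatten_record and reformat_patch produce:

# Hedged illustration, assuming `require 'forest_admin_datasource_mongoid'` has run.
include ForestAdminDatasourceMongoid::Utils::Helpers

# Flattened fields use '@@@' as a separator and are expanded back into sub-documents.
record = { 'title' => 'Dune', 'author@@@name' => 'Herbert' }
unflatten_record(record, ['author.name'])
# => { 'title' => 'Dune', 'author' => { 'name' => 'Herbert' } }

# Dotted patch keys are rebuilt into a nested hash via deep_merge.
reformat_patch({ 'author.name' => 'Herbert', 'author.country' => 'USA' })
# => { 'author' => { 'name' => 'Herbert', 'country' => 'USA' } }
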
data/lib/forest_admin_datasource_mongoid/utils/mongoid_serializer.rb
@@ -0,0 +1,38 @@
module ForestAdminDatasourceMongoid
  module Utils
    MongoidSerializer = Struct.new(:object) do
      def to_hash(projection)
        hash_object(object, projection)
      end

      def hash_object(object, projection, with_associations: true)
        hash = {}

        return if object.nil?

        object.attributes.slice(*projection.columns).each do |key, value|
          hash[key] = value
        end

        if with_associations
          each_association_collection(object, projection) do |association_name, item|
            hash[association_name] = hash_object(
              item,
              projection.relations[association_name],
              with_associations: projection.relations.key?(association_name)
            )
          end
        end

        hash
      end

      def each_association_collection(object, projection)
        one_associations = [Mongoid::Association::Referenced::HasOne, Mongoid::Association::Referenced::BelongsTo]
        object.class.reflect_on_all_associations
              .filter { |a| one_associations.include?(a.class) && projection.relations.key?(a.name.to_s) }
              .each { |association| yield(association.name.to_s, object.send(association.name.to_s)) }
      end
    end
  end
end
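
Editorial usage sketch: the serializer only calls #columns and #relations on the projection it receives, so a minimal stand-in object is enough to show the output shape; the Book/Author models and values are hypothetical.

# Stand-in mirroring only the projection interface used above; illustrative, not the toolkit API.
FakeProjection = Struct.new(:columns, :relations)

author_projection = FakeProjection.new(['name'], {})
projection = FakeProjection.new(['title'], { 'author' => author_projection })

book = Book.first # assumes a Book Mongoid model with belongs_to :author
ForestAdminDatasourceMongoid::Utils::MongoidSerializer.new(book).to_hash(projection)
# => { 'title' => 'Dune', 'author' => { 'name' => 'Herbert' } }
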
data/lib/forest_admin_datasource_mongoid/utils/pipeline/condition_generator.rb
@@ -0,0 +1,30 @@
module ForestAdminDatasourceMongoid
  module Utils
    module Pipeline
      class ConditionGenerator
        include Utils::Schema

        FOREST_RECORD_DOES_NOT_EXIST = 'FOREST_RECORD_DOES_NOT_EXIST'.freeze

        def self.tag_record_if_not_exist(field, then_expr)
          if_missing(field, then_expr, { FOREST_RECORD_DOES_NOT_EXIST => true })
        end

        def self.tag_record_if_not_exist_by_value(field, then_expr)
          if_missing(field, then_expr, FOREST_RECORD_DOES_NOT_EXIST)
        end

        def self.if_missing(field, then_expr, else_expr)
          {
            '$cond' => {
              'if' => { '$and' => [{ '$ne' => [{ '$type' => "$#{field}" }, 'missing'] },
                                   { '$ne' => ["$#{field}", nil] }] },
              'then' => then_expr,
              'else' => else_expr
            }
          }
        end
      end
    end
  end
end
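
To make the generated stage concrete, this is the expression tag_record_if_not_exist returns for a possibly-missing 'author' sub-document, derived directly from if_missing above:

ForestAdminDatasourceMongoid::Utils::Pipeline::ConditionGenerator
  .tag_record_if_not_exist('author', '$author')
# => { '$cond' => {
#        'if' => { '$and' => [{ '$ne' => [{ '$type' => '$author' }, 'missing'] },
#                             { '$ne' => ['$author', nil] }] },
#        'then' => '$author',
#        'else' => { 'FOREST_RECORD_DOES_NOT_EXIST' => true } } }
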
data/lib/forest_admin_datasource_mongoid/utils/pipeline/filter_generator.rb
@@ -0,0 +1,218 @@
module ForestAdminDatasourceMongoid
  module Utils
    module Pipeline
      class FilterGenerator
        include ForestAdminDatasourceToolkit::Components::Query::ConditionTree

        STRING_OPERATORS = [Operators::MATCH, Operators::NOT_CONTAINS, Operators::NOT_I_CONTAINS].freeze

        def self.sort_and_paginate(model, filter)
          sort = compute_sort(filter.sort)

          sort_and_limit = []

          sort_and_limit << { '$sort' => sort } if sort

          if filter.page
            sort_and_limit << { '$skip' => filter.page.offset }
            sort_and_limit << { '$limit' => filter.page.limit }
          end

          return [sort_and_limit, [], []] unless sort

          all_sort_criteria_native = sort.keys.none? do |key|
            !model.fields.key?(key)
          end

          # if sort applies to native fields and no filters are applied (very common case)
          # we apply pre-sort + limit at the beginning of the pipeline (to improve perf)
          return [sort_and_limit, [], []] if all_sort_criteria_native && filter.condition_tree.nil?

          all_condition_tree_keys_native = filter.condition_tree&.projection&.none? do |key|
            !model.fields.key?(key)
          end

          # if filters apply to native fields only, we can apply the sort right after filtering
          return [[], sort_and_limit, []] if all_sort_criteria_native && all_condition_tree_keys_native

          # if sorting applies to relations, it is safer to do it at the end of the pipeline
          [[], [], sort_and_limit]
        end

        def self.filter(model, stack, filter)
          fields = []
          tree = filter.condition_tree
          match = compute_match(model, stack, tree, fields)

          pipeline = []
          pipeline << compute_fields(fields) unless fields.empty?
          pipeline << { '$match' => match } if match

          pipeline
        end

        def self.list_relations_used_in_filter(filter)
          fields = Set.new

          filter.sort&.each do |clause|
            next unless clause[:field].include?(':') # only relations (fields containing ':')

            list_paths(clause[:field]).each do |field|
              parts = field.split('.') # Split into nested levels ("author.country.name" → ["author", "country", "name"])

              (1...parts.length).each do |i|
                parent_field = parts[0...i].join('.')
                fields.add(parent_field)
              end
            end
          end

          list_fields_used_in_filter_tree(filter.condition_tree, fields)

          fields.to_a
        end

        def self.list_fields_used_in_filter_tree(condition_tree, fields)
          if condition_tree.is_a? Nodes::ConditionTreeBranch
            condition_tree.conditions.each { |condition| list_fields_used_in_filter_tree(condition, fields) }
          elsif condition_tree&.field&.include?(':')
            list_paths(condition_tree.field).each { |field| fields << field }
          end
        end

        def self.list_paths(field)
          parts = field.split(':')

          parts.map.with_index { |_, index| parts.slice(0, index + 1).join('.') }
        end

        def self.compute_sort(sort)
          return if sort.nil? || sort.empty?

          result = {}

          sort.each do |clause|
            formatted_field = format_nested_field_path(clause[:field])
            result[formatted_field] = clause[:ascending] ? 1 : -1
          end

          result
        end

        def self.format_nested_field_path(field)
          field.tr(':', '.')
        end

        def self.compute_match(model, stack, tree, fields)
          schema = Utils::Schema::MongoidSchema.from_model(model).apply_stack(stack, skip_as_models: true)

          if tree.is_a? Nodes::ConditionTreeBranch
            # to check
            return {
              "$#{tree.aggregator.downcase}" => tree.conditions.map do |condition|
                compute_match(model, stack, condition, fields)
              end
            }
          end

          if tree.is_a? Nodes::ConditionTreeLeaf
            value = format_and_cast_leaf_value(schema, tree, fields)
            condition = build_match_condition(tree.operator, value)

            return { format_nested_field_path(tree.field) => condition }
          end

          nil
        end

        def self.format_and_cast_leaf_value(schema, leaf, fields)
          value = leaf.value
          leaf = leaf.override(field: format_nested_field_path(leaf.field))
          is_array, instance = get_field_metadata(schema, leaf.field)

          if is_array
            if instance == Date && value.is_a?(Array) && value.all? { |v| valid_iso_date?(v) }
              value = value.map { |v| Date.parse(v) }
            elsif instance == BSON::ObjectId && value.is_a?(Array) && value.all? { |v| BSON::ObjectId.legal?(v) }
              value = value.map { |id| BSON::ObjectId.from_string(id) }
            end
          elsif instance == BSON::ObjectId
            if STRING_OPERATORS.include?(leaf.operator)
              fields << leaf.field
              leaf.override(field: format_string_field_name(leaf.field))
            elsif value.is_a?(Array) && value.all? { |v| BSON::ObjectId.legal?(v) }
              value = value.map { |id| BSON::ObjectId.from_string(id) }
            elsif BSON::ObjectId.legal?(value)
              value = BSON::ObjectId.from_string(value)
            end
          elsif instance == Date && valid_iso_date?(value)
            value = Date.parse(value)
          end

          value
        end

        def self.build_match_condition(operator, value)
          case operator
          when Operators::GREATER_THAN
            { '$gt' => value }
          when Operators::LESS_THAN
            { '$lt' => value }
          when Operators::EQUAL
            { '$eq' => value }
          when Operators::NOT_EQUAL
            { '$ne' => value }
          when Operators::IN
            { '$in' => value }
          when Operators::INCLUDES_ALL
            { '$all' => value }
          when Operators::NOT_CONTAINS
            { '$not' => Regexp.new("^.*#{value}.*$") }
          when Operators::NOT_I_CONTAINS
            { '$not' => Regexp.new("^.*#{value}.*$", Regexp::IGNORECASE) }
          when Operators::MATCH
            { '$regex' => value }
          when Operators::PRESENT
            { '$exists' => true, '$ne' => nil }
          else
            raise ForestAdminDatasourceToolkit::Exceptions::ForestException, "Unsupported '#{operator}' operator"
          end
        end

        def self.compute_fields(fields)
          fields.each_with_object({ '$addFields' => {} }) do |field, computed|
            string_field = format_string_field_name(field)
            computed['$addFields'][string_field] = { '$toString' => "$#{field}" }
          end
        end

        def self.format_string_field_name(field)
          parts = field.split('.')
          parts << "string_#{parts.pop}"

          parts.join('.')
        end

        def self.get_field_metadata(schema, field)
          begin
            sub_schema = schema.get_sub_schema(field)
            is_array = sub_schema.is_array
            instance = sub_schema.schema_type.type
          rescue StandardError
            is_array = false
            instance = 'String'
          end

          [is_array, instance]
        end

        def self.valid_iso_date?(value)
          DateTime.iso8601(value)
          true
        rescue ArgumentError
          false
        end
      end
    end
  end
end
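
A quick, hedged illustration of the operator mapping in build_match_condition, assuming the gem and the toolkit (which defines the Operators constants used above) are loaded:

gen = ForestAdminDatasourceMongoid::Utils::Pipeline::FilterGenerator
ops = ForestAdminDatasourceToolkit::Components::Query::ConditionTree::Operators

gen.build_match_condition(ops::IN, %w[draft published]) # => { '$in' => ['draft', 'published'] }
gen.build_match_condition(ops::PRESENT, nil)            # => { '$exists' => true, '$ne' => nil }
gen.build_match_condition(ops::MATCH, /forest/i)        # => { '$regex' => /forest/i }
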
data/lib/forest_admin_datasource_mongoid/utils/pipeline/group_generator.rb
@@ -0,0 +1,86 @@
module ForestAdminDatasourceMongoid
  module Utils
    module Pipeline
      class GroupGenerator
        AGGREGATION_OPERATION = {
          'Sum' => '$sum',
          'Avg' => '$avg',
          'Count' => '$sum',
          'Max' => '$max',
          'Min' => '$min'
        }.freeze

        GROUP_OPERATION = {
          'Year' => '%Y-01-01',
          'Month' => '%Y-%m-01',
          'Day' => '%Y-%m-%d',
          'Week' => '%Y-%m-%d'
        }.freeze

        def self.group(aggregation)
          [
            {
              '$group' => {
                _id: compute_groups(aggregation.groups),
                value: compute_value(aggregation)
              }
            },
            {
              '$project' => {
                '_id' => 0,
                'value' => '$value',
                'group' => compute_groups_projection(aggregation.groups)
              }
            }
          ]
        end

        class << self
          private

          def compute_value(aggregation)
            # Handle count(*) case
            return { '$sum' => 1 } if aggregation.field.nil?

            # General case
            field = "$#{aggregation.field.tr(":", ".")}"

            if aggregation.operation == 'Count'
              { '$sum' => { '$cond' => [{ '$ne' => [field, nil] }, 1, 0] } }
            else
              { AGGREGATION_OPERATION[aggregation.operation] => field }
            end
          end

          def compute_groups(groups)
            return nil if groups.nil? || groups.empty?

            groups.reduce({}) do |memo, group|
              field = "$#{group[:field].tr(":", ".")}"

              if group[:operation]
                if group[:operation] == 'Week'
                  date = { '$dateTrunc' => { 'date' => field, 'startOfWeek' => 'Monday', 'unit' => 'week' } }
                  field = { '$dateToString' => { 'format' => GROUP_OPERATION[group[:operation]], 'date' => date } }
                else
                  field = { '$dateToString' => { 'format' => GROUP_OPERATION[group[:operation]], 'date' => field } }
                end
              end

              memo.merge(group[:field] => field)
            end
          end

          # Move fields in _id to the root of the document
          def compute_groups_projection(groups)
            return { '$literal' => {} } if groups.nil? || groups.empty?

            groups.each_with_object({}) do |group, memo|
              memo[group[:field]] = "$_id.#{group[:field]}"
            end
          end
        end
      end
    end
  end
end
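
A hedged sketch of the two stages group produces; the generator only reads #field, #operation and #groups from the aggregation, so a stand-in struct is used and the field names are invented:

# Stand-in mirroring the aggregation interface read above; not the toolkit class itself.
FakeAggregation = Struct.new(:field, :operation, :groups)

agg = FakeAggregation.new('price', 'Sum', [{ field: 'created_at', operation: 'Year' }])
ForestAdminDatasourceMongoid::Utils::Pipeline::GroupGenerator.group(agg)
# => [
#      { '$group' => { _id: { 'created_at' => { '$dateToString' =>
#                        { 'format' => '%Y-01-01', 'date' => '$created_at' } } },
#                      value: { '$sum' => '$price' } } },
#      { '$project' => { '_id' => 0, 'value' => '$value',
#                        'group' => { 'created_at' => '$_id.created_at' } } }
#    ]
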
data/lib/forest_admin_datasource_mongoid/utils/pipeline/lookup_generator.rb
@@ -0,0 +1,97 @@
module ForestAdminDatasourceMongoid
  module Utils
    module Pipeline
      class LookupGenerator
        include Utils::Schema

        # Transform a forest admin projection into a mongo pipeline that performs the lookups
        # and transformations to target them
        def self.lookup(model, stack, projection, options)
          schema_stack = stack.each_with_index.reduce([MongoidSchema.from_model(model)]) do |acc, (_, index)|
            [
              *acc,
              MongoidSchema.from_model(model).apply_stack(stack.slice(0..(index + 1)), skip_as_models: true)
            ]
          end

          lookup_projection(nil, schema_stack.map(&:fields), projection, options)
        end

        def self.lookup_projection(current_path, schema_stack, projection, options)
          pipeline = []
          fields = {}
          projection.relations.each do |name, relation_projection|
            pipeline.push(*lookup_relation(current_path, schema_stack, name, relation_projection, options))
            # pipeline = [*pipeline, *lookup_relation(current_path, schema_stack, name, relation_projection, options)]
            fields.merge!(add_fields(name, relation_projection, options))
          end

          pipeline.push({ '$addFields' => fields }) unless fields.empty?

          pipeline
        end

        def self.add_fields(name, projection, options)
          return {} if options[:include] && !options[:include].include?(name)
          return {} if options[:exclude]&.include?(name)

          projection.filter { |field| field.include?('@@@') }
                    .map { |field_name| "#{name}.#{field_name.tr(":", ".")}" }
                    .each_with_object({}) do |curr, acc|
                      acc[curr] = "$#{curr.gsub("@@@", ".")}"
                    end
        end

        def self.lookup_relation(current_path, schema_stack, name, projection, options)
          models = ObjectSpace
                   .each_object(Class)
                   .select { |klass| klass < Mongoid::Document && klass.name && !klass.name.start_with?('Mongoid::') }
                   .to_h { |klass| [klass.name, klass] }

          as = current_path ? "#{current_path}.#{name}" : name

          last_schema = schema_stack[schema_stack.length - 1]
          previous_schema = schema_stack.slice(0..(schema_stack.length - 1))

          return {} if options[:include] && !options[:include].include?(as)
          return {} if options[:exclude]&.include?(as)

          # Native many to one relation
          identifier = '__many_to_one'
          if name.end_with?(identifier)
            foreign_key_name = name[0..(name.length - identifier.length - 1)]
            model = models[last_schema[foreign_key_name].options[:association].class_name]

            from = model.name.gsub('::', '__')
            local_field = current_path ? "#{current_path}.#{foreign_key_name}" : foreign_key_name
            foreign_field = '_id'
            sub_schema = MongoidSchema.from_model(model).fields

            return [
              # Push lookup to pipeline
              {
                '$lookup' => { 'from' => from, 'localField' => local_field, 'foreignField' => foreign_field,
                               'as' => as }
              },
              { '$unwind' => { 'path' => "$#{as}", 'preserveNullAndEmptyArrays' => true } },

              # Recurse to get relations of relations
              *lookup_projection(as, [*schema_stack, sub_schema], projection, options)
            ]
          end

          # inverse of fake relation
          if name == 'parent' && !previous_schema.empty?
            return lookup_projection(as, previous_schema, projection, options)
          end

          # fake relation
          return lookup_projection(as, [*schema_stack, last_schema[name]], projection, options) if last_schema[name]

          # We should have handled all possible cases.
          raise ForestAdminDatasourceToolkit::Exceptions::ForestException, "Unexpected relation: '#{name}'"
        end
      end
    end
  end
end
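
For orientation, this illustrative literal (not a live call) shows the shape of the stages lookup_relation returns for a hypothetical 'author_id__many_to_one' projection entry: 'from' mirrors model.name with '::' replaced by '__', 'localField' is the foreign key, and 'as' is the relation name prefixed by the current path.

[
  { '$lookup' => { 'from' => 'Author', 'localField' => 'author_id',
                   'foreignField' => '_id', 'as' => 'author_id__many_to_one' } },
  { '$unwind' => { 'path' => '$author_id__many_to_one', 'preserveNullAndEmptyArrays' => true } }
]
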
data/lib/forest_admin_datasource_mongoid/utils/pipeline/projection_generator.rb
@@ -0,0 +1,20 @@
module ForestAdminDatasourceMongoid
  module Utils
    module Pipeline
      class ProjectionGenerator
        def self.project(projection)
          return [{ '$replaceRoot' => { 'newRoot' => { '$literal' => {} } } }] if projection.empty?

          project = { '_id' => false, 'FOREST_RECORD_DOES_NOT_EXIST' => true }

          projection.each do |field|
            formatted_field = field.tr(':', '.')
            project[formatted_field] = true
          end

          [{ '$project' => project }]
        end
      end
    end
  end
end
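
Illustration of the stage produced for two invented columns and one relation field; the ':' separators are rewritten to '.':

ForestAdminDatasourceMongoid::Utils::Pipeline::ProjectionGenerator
  .project(['title', 'price', 'author:name'])
# => [{ '$project' => { '_id' => false, 'FOREST_RECORD_DOES_NOT_EXIST' => true,
#                       'title' => true, 'price' => true, 'author.name' => true } }]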