eco-helpers 3.0.29 → 3.0.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +36 -1
- data/eco-helpers.gemspec +1 -1
- data/lib/eco/api/common/people/entries.rb +6 -4
- data/lib/eco/api/common/people/entry_factory.rb +94 -39
- data/lib/eco/api/common/people/person_entry.rb +93 -26
- data/lib/eco/api/common/people/person_parser.rb +3 -1
- data/lib/eco/api/custom/parser.rb +1 -1
- data/lib/eco/api/organization/people.rb +1 -0
- data/lib/eco/api/session/config.rb +7 -2
- data/lib/eco/api/usecases/default/locations/cli/codes_to_tags_cli.rb +3 -3
- data/lib/eco/api/usecases/default/locations/cli/csv_to_tree_cli.rb +1 -1
- data/lib/eco/api/usecases/default/locations/cli/tagtree_extract_cli.rb +11 -11
- data/lib/eco/api/usecases/default/locations/cli/tagtree_paths_cli.rb +3 -3
- data/lib/eco/api/usecases/default/locations/cli/tagtree_upload_cli.rb +4 -4
- data/lib/eco/api/usecases/default/meta/cli/graphql_schema_cli.rb +3 -3
- data/lib/eco/api/usecases/default/meta/graphql_schema.rb +3 -3
- data/lib/eco/api/usecases/default/people/amend/cli/clean_unknown_tags_cli.rb +2 -2
- data/lib/eco/api/usecases/default/people/amend/cli/reinvite_sync_cli.rb +1 -1
- data/lib/eco/api/usecases/default/people/amend/cli/reinvite_trans_cli.rb +3 -3
- data/lib/eco/api/usecases/default/people/amend/cli/restore_db_cli.rb +12 -12
- data/lib/eco/api/usecases/default/people/amend/refresh_case.rb +2 -2
- data/lib/eco/api/usecases/default/people/amend/reinvite_sync_case.rb +1 -1
- data/lib/eco/api/usecases/default/people/amend/reinvite_trans_case.rb +2 -2
- data/lib/eco/api/usecases/default/people/treat/analyse_people_case.rb +2 -1
- data/lib/eco/api/usecases/default/people/treat/cli/analyse_people_cli.rb +25 -25
- data/lib/eco/api/usecases/default/people/treat/cli/org_data_convert_cli.rb +7 -7
- data/lib/eco/api/usecases/default/people/treat/cli/supers_cyclic_identify_cli.rb +4 -5
- data/lib/eco/api/usecases/default/people/treat/cli/supers_hierachy_cli.rb +4 -4
- data/lib/eco/api/usecases/default/people/treat/org_data_convert_case.rb +3 -3
- data/lib/eco/api/usecases/default/people/treat/supers_cyclic_identify_case.rb +15 -14
- data/lib/eco/api/usecases/default/people/treat/supers_hierarchy_case.rb +12 -12
- data/lib/eco/api/usecases/default/utils/cli/entries_to_csv_cli.rb +20 -0
- data/lib/eco/api/usecases/default/utils/cli/group_csv_cli.rb +3 -3
- data/lib/eco/api/usecases/default/utils/cli/json_to_csv_cli.rb +1 -1
- data/lib/eco/api/usecases/default/utils/cli/sort_csv_cli.rb +1 -1
- data/lib/eco/api/usecases/default/utils/cli/split_csv_cli.rb +5 -5
- data/lib/eco/api/usecases/default/utils/cli/split_json_cli.rb +2 -2
- data/lib/eco/api/usecases/default/utils/entries_to_csv_case.rb +39 -0
- data/lib/eco/api/usecases/default/utils/group_csv_case.rb +3 -3
- data/lib/eco/api/usecases/default/utils/sort_csv_case.rb +1 -1
- data/lib/eco/api/usecases/default/utils/split_csv_case.rb +4 -4
- data/lib/eco/api/usecases/default/utils/split_json_case.rb +8 -6
- data/lib/eco/api/usecases/default/utils.rb +1 -0
- data/lib/eco/api/usecases/default_cases.rb +0 -1
- data/lib/eco/api/usecases/graphql/samples/location/service/tree_diff/convertible/inputable.rb +4 -1
- data/lib/eco/api/usecases/lib/files/sftp.rb +5 -1
- data/lib/eco/cli/scripting/args_helpers.rb +3 -2
- data/lib/eco/cli_default/input.rb +1 -0
- data/lib/eco/cli_default/usecases.rb +0 -7
- data/lib/eco/data/files/helpers.rb +3 -0
- data/lib/eco/version.rb +1 -1
- metadata +6 -5
- data/lib/eco/api/usecases/default_cases/entries_to_csv_case.rb +0 -14
checksums.yaml
CHANGED
```diff
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cc3166aa570a24f55a90822a79ad103419cbbd176677a5f17fe3ef8a27553cf6
+  data.tar.gz: f3418aa7bd4e8c645b60eeb839038a03da00fb4328a6acd63bb1500dfe18cfb8
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3ca74dcfcb34cc4b252c5613fcf3bf786eb85435920fae73b3ef24e293a6ce11f96698ff0ea7f16cd57e023ce9feee613eb2f8cb44e2352b501150098a6fc0cb
+  data.tar.gz: ad2f68c81259897a21267bbc914b6c79dda294191ce6eb9d293c080685be0bf829883b6a98c670e167602379a9af178bde80ec82f9fc0c390d48e801b7e96758
```
data/CHANGELOG.md
CHANGED
```diff
@@ -2,7 +2,7 @@
 
 All notable changes to this project will be documented in this file.
 
-## [3.0.
+## [3.0.32] - 2025-04-xx
 
 ### Added
 
@@ -10,6 +10,41 @@ All notable changes to this project will be documented in this file.
 
 ### Fixed
 
+## [3.0.31] - 2025-04-xx
+
+### Added
+
+### Changed
+
+- upgraded `ecoportal-api-gem`
+
+### Fixed
+
+- `Eco::API::Common::People::PersonEntry#_parse_type`
+  - During a refactor the line that parses schema details fields that are `multiple` got removed. **Added** this parsing back in.
+- `Eco::Data::Files#folder_files`
+  - prevent `nil` error
+- Message of `Sftp` moving file to.
+
+## [3.0.30] - 2025-04-03
+
+### Added
+
+- `Eco::API::Session::Config#locations_mapper`
+  - Added named argument `insensitive`
+- `Eco::API::Common::People::PersonEntry#entry` shortcut, to obtain the data as is at a certain stage of the parsing process.
+- `Eco::API::Common::People::Entries#export` **added** `parsing_phase` named argument to specify the data of what parsing stage should be used to generated the CSV.
+- `-entries-to-csv` case
+  - added `-phase` option
+
+### Changed
+
+- **Moved** `EntriesToCsv` to `Default::Utils` space
+
+### Fixed
+
+- `SCR.get_file` check specified file existence, if `filename` provided, rather than if `required`.
+
 ## [3.0.29] - 2025-03-31
 
 ### Added
```
data/eco-helpers.gemspec
CHANGED
```diff
@@ -41,7 +41,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency 'bcrypt_pbkdf', '~> 1.0'
   spec.add_dependency 'docx', '>= 0.8.0', '< 0.9'
   spec.add_dependency 'dotenv', '~> 3'
-  spec.add_dependency 'ecoportal-api', '~> 0.10', '>= 0.10.
+  spec.add_dependency 'ecoportal-api', '~> 0.10', '>= 0.10.10'
   spec.add_dependency 'ecoportal-api-graphql', '~> 0.4', '>= 0.4.5'
   spec.add_dependency 'ecoportal-api-v2', '~> 2.0', '>= 2.0.16'
   spec.add_dependency 'ed25519', '~> 1.2'
```
data/lib/eco/api/common/people/entries.rb
CHANGED
```diff
@@ -153,17 +153,19 @@ module Eco
       newFrom to_a - discarded
     end
 
-    #
+    # @todo could it somehow rather use the people-to-csv case?
     # Helper to dump the entries into a CSV
     # @param filename [String] the destination file
-
+    # @param parsing_phase [Symbol] data as per specific parsing stage.
+    def export(filename, parsing_phase: :internal)
       header = each_with_object([]) do |entry, hds|
-        hds.push(*entry.
+        hds.push(*entry.entry(parsing_phase).keys).uniq!
       end
+
       CSV.open(filename, 'w') do |csv|
         csv << header
         each do |entry|
-          csv << entry.
+          csv << entry.entry(parsing_phase).values_at(*header)
        end
      end
    end
```
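The rewritten `Entries#export` derives both the header and each row from `entry.entry(parsing_phase)`: the header is the union of keys across all entries, and each row is looked up with `values_at` so missing attributes become blank cells. A minimal standalone sketch of that pattern, using plain hashes as hypothetical row data rather than the gem's entry objects:

```ruby
require 'csv'

# Hypothetical rows standing in for the per-entry hashes of a parsing phase.
rows = [
  { 'name' => 'Ana',  'email' => 'ana@example.com' },
  { 'name' => 'Luis', 'supervisor_id' => '123' }
]

# Header = union of keys across all rows (same push/uniq! idiom as above).
header = rows.each_with_object([]) do |row, hds|
  hds.push(*row.keys).uniq!
end

CSV.open('entries_sketch.csv', 'w') do |csv|
  csv << header
  # values_at keeps every row aligned with the header; absent keys become nil.
  rows.each { |row| csv << row.values_at(*header) }
end
```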
data/lib/eco/api/common/people/entry_factory.rb
CHANGED
```diff
@@ -2,13 +2,13 @@ module Eco
   module API
     module Common
       module People
-        #
+        # @todo EntryFactory should suppport multiple schemas itself
         # (rather that being done on `Session`)
         # => currently, it's through session.entry_factory(schema: id),
         # but this is wrong
         # => This way, Entries and PersonEntry will be able to refer to attr_map
         # and person_parser linked to schema_id
-        # =>
+        # => 'schema_id' should be an optional column in the input file,
         # or parsable via a custom parser to scope the schema
         # Helper factory class to generate entries (input entries).
         # @attr_reader schema [Ecoportal::API::V1::PersonSchema] person schema to
@@ -26,7 +26,7 @@ module Eco
         # set of attribute, type and format parsers/serializers.
         # @param attr_map [nil, Eco::Data::Mapper] attribute names mapper
         # to translate external names into internal ones and _vice versa_.
-        def initialize(e, schema:, person_parser: nil, default_parser: nil, attr_map: nil)
+        def initialize(e, schema:, person_parser: nil, default_parser: nil, attr_map: nil) # rubocop:disable Naming/MethodParameterName
           super(e)
 
           msg = "Constructor needs a PersonSchema. Given: #{schema.class}"
@@ -42,12 +42,18 @@ module Eco
           @source_person_parser = person_parser
 
           # load default parser + custom parsers
-          @default_parser =
+          @default_parser =
+            default_parser&.new(schema: @schema) ||
+            Eco::API::Common::People::DefaultParsers.new(schema: @schema)
+
           base_parser = @default_parser.merge(@source_person_parser)
+
           # new parser with linked schema
-          @person_parser = @source_person_parser.
+          @person_parser = @source_person_parser.
+            new(schema: @schema).
+            merge(base_parser)
           @person_parser_patch_version = @source_person_parser.patch_version
-          @attr_map
+          @attr_map = attr_map
         end
 
         def newFactory(schema: nil) # rubocop:disable Naming/MethodName
```
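The constructor now resolves the default parser with safe navigation plus a fallback: instantiate the injected parser class if one was given, otherwise build the stock `DefaultParsers`. A standalone sketch of that idiom, with hypothetical stand-in classes rather than the gem's own:

```ruby
# Hypothetical stand-ins for a default and an injected parser collection.
class DefaultParsers
  def initialize(schema:)
    @schema = schema
  end

  def to_s
    "default parsers for #{@schema}"
  end
end

class CustomParsers < DefaultParsers
  def to_s
    "custom parsers for #{@schema}"
  end
end

# `&.new` only fires when a class was injected; `||` supplies the fallback.
def resolve_parsers(schema, parser_class = nil)
  parser_class&.new(schema: schema) || DefaultParsers.new(schema: schema)
end

puts resolve_parsers('people_schema')                 # => default parsers for people_schema
puts resolve_parsers('people_schema', CustomParsers)  # => custom parsers for people_schema
```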
```diff
@@ -60,18 +66,22 @@ module Eco
           )
         end
 
-        #
-        #
-        # @
+        # Provides with a Eco::API::Common::People::PersonParser object
+        # (collection of attribute parsers)
+        # @note if the custom person parser has changed, it updates the copy of
+        #   this EntryFactory instance
+        # @return [Eco::API::Common::People::PersonParser] set of attribute,
+        #   type and format parsers/serializers.
         def person_parser
           if @person_parser_patch_version < @source_person_parser.patch_version
             @person_parser.merge(@source_person_parser)
             @person_parser_patch_version = @source_person_parser.patch_version
           end
+
           @person_parser
         end
 
-        #
+        # Key method to generate objects of `PersonEntry` that share dependencies
         # via this `EntryFactory` environment.
         # @note this method is necessary to make the factory object work
         # as a if it was a class `PersonEntry` you can call `new` on.
```
```diff
@@ -97,17 +107,26 @@ module Eco
         # - if the `format:` you provide is not a `Symbol`.
         # - if there is no _parser/serializer_ defined for `format:`.
         # @param data [Array<Hash>] data to be parsed. It cannot be used alongside with `file:`
-        # @param file [String] absolute or relative path to the input file. It
-        #
+        # @param file [String] absolute or relative path to the input file. It
+        #   cannot be used alongside with `data:`.
+        # @param format [Symbol] it must be used when you use the option `file:`
+        #   (i.e. `:xml`, `:csv`), as it specifies the format of the input `file:`.
         # @param options [Hash] further options.
-        # @option options [String] :encoding optional parameter to read `file:` by
-        #
+        # @option options [String] :encoding optional parameter to read `file:` by
+        #   expecting certain encoding.
+        # @option options [Boolean] :check_headers signals if the `csv` file headers
+        #   should be expected.
         # @return [Eco::API::Common::People::Entries] collection of `Eco::API::Common::People::PersonEntry`.
-        def entries(
-
+        def entries(
+          data: (no_data = true; nil), # rubocop:disable Style/Semicolon
+          file: (no_file = true; nil), # rubocop:disable Style/Semicolon
+          format: (no_format = true; nil), # rubocop:disable Style/Semicolon
+          **options
+        )
+          msg = 'You should at least use data: or file:, but not both'
           fatal msg if no_data == no_file
 
-          msg =
+          msg = 'You must specify a valid format: (symbol) when you use file.'
           fatal msg if file && no_format
 
           msg = "Format should be a Symbol. Given '#{format}'"
```
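The new `entries` signature relies on Ruby's sentinel-default trick: the default expression `(no_data = true; nil)` sets a flag and then evaluates to `nil`, so the method can tell "keyword omitted" apart from "keyword explicitly passed as nil". A minimal standalone sketch of how that detection behaves:

```ruby
# When a keyword is supplied, its default is never evaluated, so the flag
# stays nil; when it is omitted, the default runs and the flag becomes true.
def entries(data: (no_data = true; nil), file: (no_file = true; nil))
  # both omitted (true == true) or both given (nil == nil) -> reject
  raise ArgumentError, 'use data: or file:, but not both' if no_data == no_file

  no_file ? "from data (#{data.size} rows)" : "from file #{file}"
end

puts entries(data: [{}, {}])      # => from data (2 rows)
puts entries(file: 'people.csv')  # => from file people.csv
# entries(data: [], file: 'x.csv') would raise: both keywords were given
```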
```diff
@@ -120,25 +139,40 @@ module Eco
           options.merge!(file: file) unless no_file
           options.merge!(format: format) unless no_format
 
-          Entries.new(
+          Entries.new(
+            to_array_of_hashes(**options),
+            klass: PersonEntry,
+            factory: self
+          )
         end
 
-
-
+        # Input file format parsing to obtain the final processing model.
+        # @note This only takes care of the file format parsing alone (i.e. csv, json).
+        # @note
+        #   1. The **parsing** of the file happens here.
+        #   2. It also adds a row index (idx column).
+        # @return [Array<Hash>] the parsed content.
+        def to_array_of_hashes(**kargs) # rubocop:disable Metrics/AbcSize, Metrics/MethodLength
+          content, file, encoding, format = kargs.values_at(
+            :content, :file, :encoding, :format
+          )
 
-          # Support for multiple file
+          # Support for multiple file (iterated self call)
           if file.is_a?(Array)
             return file.each_with_object([]) do |f, out|
               log(:info) { "Parsing file '#{f}'" }
-              curr = to_array_of_hashes(**kargs
+              curr = to_array_of_hashes(**kargs, file: f)
               out.concat(curr)
             end
           end
 
-          # Get content only when it's not :xls, nor :json
-          # note: even if content was provided, file takes precedence
-          if get_content?(format)
-            content = get_file_content(
+          # Get content only when it's not :xls, nor :json (so when is :csv).
+          # @note: even if content was provided, file takes precedence
+          if file && get_content?(format)
+            content = get_file_content(
+              file,
+              encoding: encoding
+            )
           end
 
           case content
```
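When `file:` is an array, `to_array_of_hashes` simply calls itself once per file and concatenates the results ("iterated self call"). A standalone sketch of that recursion, with the actual per-file parsing replaced by a stub:

```ruby
# Stand-in for the gem's to_array_of_hashes: real parsing is faked so the
# multi-file recursion can be shown on its own.
def to_rows(file:, **kargs)
  if file.is_a?(Array)
    return file.each_with_object([]) do |f, out|
      out.concat(to_rows(**kargs, file: f))   # one self-call per file
    end
  end

  [{ 'source_file' => file }]                  # stub for the single-file parse
end

p to_rows(file: ['jan.csv', 'feb.csv'])
# => [{"source_file"=>"jan.csv"}, {"source_file"=>"feb.csv"}]
```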
```diff
@@ -148,15 +182,24 @@ module Eco
             }
             exit(1)
           when String
-            deps
-
+            deps ||= {}
+            deps = deps.merge(**kargs.slice(:check_headers))
+
+            to_array_of_hashes(
+              content: person_parser.parse(
+                format,
+                content,
+                deps: deps
+              )
+            )
           when Enumerable
             sample = content.to_a.first
+
             case sample
             when Hash, Array, ::CSV::Row
               Eco::CSV::Table.new(content).to_array_of_hashes
             when NilClass
-              abort(
+              abort('There is NO input data')
             else
               abort("Input content 'Array' of '#{sample.class}' is not supported.")
             end
```
```diff
@@ -170,9 +213,10 @@ module Eco
           start_from_two = (format == :csv) || format == :xls
           first_idx = start_from_two ? 2 : 1
 
+          # add row index column
           out_array.each.with_index(first_idx) do |entry_hash, idx|
-            entry_hash[
-            entry_hash[
+            entry_hash['idx'] = idx
+            entry_hash['source_file'] = file
           end
         end
       end
```
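Every parsed row is stamped with an `idx` and its `source_file`; for `:csv`/`:xls` the index starts at 2 so it lines up with the row number you would see in a spreadsheet (row 1 being the header). A small standalone sketch of that stamping:

```ruby
# Hypothetical parsed rows; format :csv means indexing starts at 2.
rows   = [{ 'name' => 'Ana' }, { 'name' => 'Luis' }]
format = :csv
first_idx = ((format == :csv) || (format == :xls)) ? 2 : 1

rows.each.with_index(first_idx) do |row, idx|
  row['idx']         = idx           # matches the spreadsheet row number
  row['source_file'] = 'people.csv'
end

p rows.map { |r| r['idx'] } # => [2, 3]
```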
```diff
@@ -189,11 +233,17 @@ module Eco
         # There must be a parser/serializer defined for it.
         # @param encoding [String] optional parameter to geneate `file:` content by unsing certain encoding.
         # @return [Void].
-        def export(
+        def export(
+          data:,
+          file: 'export',
+          format: :csv,
+          encoding: 'utf-8',
+          internal_names: false
+        )
           msg = "data: Expected Eco::API::Organization::People object. Given: #{data.class}"
           fatal msg unless data.is_a?(Eco::API::Organization::People)
 
-          fatal
+          fatal 'A file should be specified.' if file.to_s.strip.empty?
           fatal "Format should be a Symbol. Given '#{format}'" if format && !format.is_a?(Symbol)
 
           msg = "There is no parser/serializer for format ':#{format}'"
```
```diff
@@ -202,25 +252,30 @@ module Eco
           run = true
           if self.class.file_exists?(file)
             prompt_user(
-
+              'Do you want to overwrite it? (Y/n):',
               explanation: "The file '#{file}' already exists.",
-              default:
+              default: 'Y'
             ) do |response|
-              run = (response ==
+              run = (response == '') || response.upcase.start_with?('Y')
             end
           end
 
           return unless run
 
-          deps = {
+          deps = {'supervisor_id' => {people: data}}
           data_entries = data.map do |person|
             new(person, dependencies: deps).then do |entry|
               internal_names ? entry.mapped_entry : entry.external_entry
             end
           end
 
-          File.open(file,
-            fd.write(
+          File.open(file, 'w', enconding: encoding) do |fd|
+            fd.write(
+              person_parser.serialize(
+                format,
+                data_entries
+              )
+            )
           end
         end
 
```
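Before overwriting an existing file, the rewritten `export` prompts the user and treats an empty answer as taking the default `'Y'`. A tiny standalone sketch of that response check:

```ruby
# Empty response falls back to the default 'Y'; any answer starting with
# 'y'/'Y' is accepted, everything else cancels the export.
def overwrite?(response)
  (response == '') || response.upcase.start_with?('Y')
end

p overwrite?('')     # => true  (take the default 'Y')
p overwrite?('yes')  # => true
p overwrite?('n')    # => false
```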
data/lib/eco/api/common/people/person_entry.rb
CHANGED
```diff
@@ -70,6 +70,24 @@ module Eco
           )
         end
 
+        # @return [Hash] the data as of at that parsing stage.
+        def entry(phase = :external)
+          phase = phase.to_sym
+
+          case phase
+          when :external
+            external_entry
+          when :mapped
+            mapped_entry
+          when :internal
+            internal_entry
+          when :final
+            final_entry
+          else
+            raise ArgumentError, "Unknown phase '#{phase}'"
+          end
+        end
+
         # @note completely serialized entry.
         # @return [Hash] entry `Hash` with **external** attribute names, and values and types thereof.
         def external_entry # rubocop:disable Style/TrivialAccessors
```
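`PersonEntry#entry` is a thin dispatcher: it coerces the phase to a symbol and returns the matching snapshot (`external_entry`, `mapped_entry`, `internal_entry` or `final_entry`), raising on anything else. A standalone sketch of that dispatch; the snapshot hashes are hypothetical and only illustrate that `:external` keeps external column names while the later phases use internal attribute names:

```ruby
# Hypothetical phase snapshots (the gem builds these from the input entry).
snapshots = {
  external: { 'Supervisor ID' => '123' }, # external names, values and types
  mapped:   { 'supervisor_id' => '123' }, # internal names only
  internal: { 'supervisor_id' => '123' }, # + internal values
  final:    { 'supervisor_id' => '123' }  # + internal types
}

def pick(snapshots, phase = :external)
  phase = phase.to_sym
  snapshots.fetch(phase) { raise ArgumentError, "Unknown phase '#{phase}'" }
end

p pick(snapshots, 'mapped') # strings work too, thanks to the to_sym coercion
```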
```diff
@@ -77,12 +95,15 @@ module Eco
         end
 
         # @note just one step away from being completely parsed (only types parsing pending).
-        # @return [Hash] entry `Hash` with **internal** attribute names and values,
+        # @return [Hash] entry `Hash` with **internal** attribute names and values,
+        #   but **external** types.
         def internal_entry # rubocop:disable Style/TrivialAccessors
           @internal_entry
         end
 
-        # @
+        # @note at this stage, only the header names have been converted/transformed.
+        # @return [Hash] entry `Hash` with **internal** attribute names,
+        #   but **external** types and values.
         def mapped_entry # rubocop:disable Style/TrivialAccessors
           @mapped_entry
         end
```
```diff
@@ -204,20 +225,21 @@ module Eco
         # @return [String] string summary of this person identity.
         def identify
           str_id = id ? "id: '#{id}'; " : ''
-          "(row: #{idx}) '#{name}'
+          str = "(row: #{idx}) '#{name}' "
+          str << "(#{str_id}ext_id: '#{external_id}'; "
+          str << "email: '#{email}')"
+          str
         end
 
         # Provides a reference of this person entry.
         # @return [String] string summary of this person identity.
         def to_s(options)
           options = into_a(options)
-          if options.include?(:identify)
-
-
-
-
-          end.join(' | ')
-        end
+          return identify if options.include?(:identify)
+
+          final_entry.each.map do |k, v|
+            "'#{k}': '#{v.to_json}'"
+          end.join(' | ')
         end
 
         # Setter to fill in all the `core` properties of the `Person` that are present in the `Entry`.
```
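The flattened `to_s` above now returns `identify` early and otherwise renders each final-entry pair as `'key': 'json-value'` joined with `' | '`. A standalone sketch of that formatting, with a hypothetical entry hash:

```ruby
require 'json'

# Hypothetical final-entry data; the gem uses the entry's own final_entry hash.
final_entry = { 'name' => 'Ana', 'filter_tags' => %w[AKL WLG] }

summary = final_entry.each.map do |k, v|
  "'#{k}': '#{v.to_json}'"
end.join(' | ')

puts summary
# => 'name': '"Ana"' | 'filter_tags': '["AKL","WLG"]'
```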
```diff
@@ -230,6 +252,7 @@ module Eco
         def set_core(person, exclude: nil)
           scoped_attrs = @emap.core_attrs(@final_entry) - into_a(exclude)
           scoped_attrs -= ['id']
+
           @final_entry.slice(*scoped_attrs).each do |attr, value|
             set_part(person, attr, value)
           rescue StandardError => e
@@ -248,6 +271,7 @@ module Eco
         def set_account(person, exclude: nil)
           person.account = {} unless person.account
           scoped_attrs = @emap.account_attrs(@final_entry) - into_a(exclude)
+
           @final_entry.slice(*scoped_attrs).each do |attr, value|
             set_part(person.account, attr, value)
           end
@@ -262,6 +286,7 @@ module Eco
         def set_details(person, exclude: nil)
           person.add_details(@person_parser.schema) if !person.details || !person.details.schema_id
           scoped_attrs = @emap.details_attrs(@final_entry) - into_a(exclude)
+
           @final_entry.slice(*scoped_attrs).each do |attr, value|
             set_part(person.details, attr, value)
           end
```
```diff
@@ -272,24 +297,28 @@ module Eco
         # @return [Hash] entry in raw: that with **external** names, values and types.
         def __external_entry(data)
           return data if parsing?
+
           _external_serializing(data)
         end
 
         # @return [Hash] that with **internal** names but **external** values and types.
         def __mapped_entry(data)
           return _mapped_parsing(data) if parsing?
+
           _mapped_serializing(data)
         end
 
         # @return [Hash] that with **internal** names and values, but **external** values and types.
         def __internal_entry(data)
           return _internal_parsing(data) if parsing?
+
           _internal_serializing(data)
         end
 
         # @return [Hash] that with **internal** names, values and types.
         def __final_entry(data)
           return _final_parsing(data) if parsing?
+
           _final_serializing(data)
         end
 
```
```diff
@@ -302,10 +331,11 @@ module Eco
           rest_keys = mapped_entry.keys - target_attrs
           target_attrs -= ['send_invites']
           external_entry = target_attrs.each_with_object({}) do |attr, hash|
-
-
-
+            next if hash.key?(ext_attr = @emap.to_external(attr))
+
+            hash[ext_attr] = mapped_entry[attr]
           end
+
           merge_missing_attrs(external_entry, mapped_entry.slice(*rest_keys))
         end
 
@@ -317,6 +347,7 @@ module Eco
           mapped_hash = @emap.aliased_attrs.each_with_object({}) do |attr, hash|
             hash[attr] = external_entry[@emap.to_external(attr)]
           end
+
           external_entry.slice(*@emap.direct_attrs).merge(mapped_hash)
         end
 
```
```diff
@@ -327,9 +358,12 @@ module Eco
         # @param internal_entry [Hash] entry with **internal** names and values, but **external** types.
         # @return [Hash] entry with **internal** names and **external** values and types.
         def _mapped_serializing(internal_entry)
-          mapped_hash = internal_entry.merge(
+          mapped_hash = internal_entry.merge(
+            _serialize_values(internal_entry, :internal)
+          )
           model_attrs = @person_parser.all_model_attrs - ['send_invites']
           aux_hash = mapped_hash.slice(*model_attrs)
+
           merge_missing_attrs(aux_hash, mapped_hash)
         end
 
@@ -345,14 +379,19 @@ module Eco
         # @param final_entry [Hash] the entry with all _internal_ (attributes, values and types)
         # @return [Hash] the `internal entry` with the **internal** attributes names and values, but external types.
         def _internal_serializing(final_entry)
-          final_entry = final_entry.merge(
+          final_entry = final_entry.merge(
+            _serialize_values(final_entry, :final)
+          )
           core_account = @person_parser.target_attrs_account + @person_parser.target_attrs_core
+
           core_account_hash = core_account.reduce({}) do |hash, attr|
             hash.merge(hash_attr(attr, _serialize_type(attr, final_entry[attr])))
           end
+
           details_hash = @person_parser.target_attrs_details.reduce({}) do |hash, attr|
             hash.merge(hash_attr(attr, _serialize_type(attr, final_entry[attr], schema: @person_parser.schema)))
           end
+
           merging(core_account_hash, details_hash) do |internal_entry|
             merge_missing_attrs(internal_entry, final_entry)
           end
```
```diff
@@ -392,21 +431,31 @@ module Eco
           core_hash = @person_parser.target_attrs_core.reduce({}) do |hash, attr|
             hash.merge(hash_attr(attr, get_part(person, attr)))
           end
+
           account_hash = @person_parser.target_attrs_account.reduce({}) do |hash, attr|
             hash.merge(hash_attr(attr, get_part(person.account, attr)))
           end
+
           details_hash = @person_parser.target_attrs_details.reduce({}) do |hash, attr|
             hash.merge(hash_attr(attr, get_part(person.details, attr)))
           end
+
           merging(core_hash, account_hash, details_hash) do |final_entry|
             final_entry['Has account?'] = !person.account.nil?
-            final_entry.merge(
+            final_entry.merge(
+              _serialize_values(person, :person)
+            )
           end
         end
 
         # HELPERS
+
         def _serialize_values(entry, phase = :person)
-          @person_parser.active_attrs(
+          @person_parser.active_attrs(
+            entry,
+            phase,
+            process: :serialize
+          ).each_with_object({}) do |attr, hash|
             data = entry.is_a?(Hash)? entry.merge(hash) : entry
             serial_attr = @person_parser.serialize(attr, data, phase, deps: @deps[attr] || {})
             hash.merge!(hash_attr(attr, serial_attr))
@@ -414,7 +463,10 @@ module Eco
         end
 
         def _parse_values(entry, phase = :internal)
-          @person_parser.active_attrs(
+          @person_parser.active_attrs(
+            entry,
+            phase
+          ).each_with_object({}) do |attr, hash|
             parsed_attr = @person_parser.parse(attr, entry.merge(hash), phase)
             hash.merge!(hash_attr(attr, parsed_attr))
           end
```
```diff
@@ -423,13 +475,19 @@ module Eco
         # Transforms each **typed** value into its `String` version
         def _serialize_type(attr, value, schema: nil)
           if !!schema
-
-
-
+            msg = "Field '#{attr}' does not exist in details of schema: '#{schema.name}'"
+            fatal(msg) unless (field = schema[attr])
+
             value = @person_parser.serialize(:multiple, value) if field.multiple
+
             if @person_parser.defined?(field.type.to_sym)
-              value = @person_parser.serialize(
+              value = @person_parser.serialize(
+                field.type.to_sym,
+                value,
+                deps: {'attr' => attr}
+              )
             end
+
             value
           elsif %w[policy_group_ids filter_tags login_provider_ids starred_ids].include?(attr)
             @person_parser.serialize(:multiple, value)
```
```diff
@@ -446,15 +504,21 @@ module Eco
         def _parse_type(attr, value, schema: nil) # rubocop:disable Metrics/AbcSize
           value = value.strip if value.is_a?(String)
           value = nil if value.to_s.strip.empty?
+
           if !!schema
-
-
-
+            msg = "Field '#{attr}' does not exist in details of schema: '#{schema.name}'"
+            fatal(msg) unless (field = schema[attr])
+
             value = @person_parser.parse(:multiple, value) if field.multiple
 
             if @person_parser.defined?(field.type.to_sym)
-              value = @person_parser.parse(
+              value = @person_parser.parse(
+                field.type.to_sym,
+                value,
+                deps: {'attr' => attr}
+              )
             end
+
             value
           elsif attr == 'email'
             value = value.strip.downcase if value
```
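For a schema details field flagged `multiple`, `_parse_type` runs the raw cell through the `:multiple` parser first and only then through the field's type parser (the step the 3.0.31 changelog notes was restored after a refactor). A standalone sketch of that two-stage parse; the `'|'` separator and the numeric type coercion are assumptions for illustration only, not the gem's actual parsers:

```ruby
# Stage 1 (assumed separator): split a multiple-value cell into an array.
def parse_multiple(raw)
  raw.to_s.split('|').map(&:strip).reject(&:empty?)
end

# Stage 2: apply the field's type parser to the (possibly split) value.
def parse_type(value, multiple:, type: :string)
  value = parse_multiple(value) if multiple
  return value unless type == :number

  multiple ? value.map { |v| Integer(v, 10) } : Integer(value, 10)
end

p parse_type('10 | 20 | 30', multiple: true, type: :number) # => [10, 20, 30]
p parse_type('AKL|WLG', multiple: true)                     # => ["AKL", "WLG"]
```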
```diff
@@ -480,6 +544,7 @@ module Eco
           merged = sorted_keys.each_with_object({}) do |k, h|
             h[k] = rev_hash[k]
           end
+
           merged = yield(merged) if block_given?
           merged
         end
@@ -498,6 +563,7 @@ module Eco
 
         def get_part(obj, attr)
           return unless obj
+
           case obj
           when Ecoportal::API::V1::PersonDetails
             #unless field = obj.get_field(attr)
@@ -529,6 +595,7 @@ module Eco
         # @return [Hash] `value` if it was a `Hash`, and `{ attr => value}` otherwise
         def hash_attr(attr, value)
           return value if value.is_a?(Hash)
+
           { attr => value }
         end
 
```