hash_mapper 0.2.3 → 0.2.4
- checksums.yaml +5 -5
- data/README.md +15 -15
- data/hash_mapper.gemspec +4 -4
- data/lib/hash_mapper.rb +17 -16
- data/lib/hash_mapper/version.rb +1 -1
- data/spec/hash_mapper_spec.rb +185 -160
- data/spec/spec_helper.rb +1 -1
- metadata +8 -9
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
-
-  metadata.gz:
-  data.tar.gz:
+SHA256:
+  metadata.gz: e29d4658750442de3a4cfff45d3d1d68918a8afe642eb7e3f4c6563363d56523
+  data.tar.gz: 74ce329715482ba68d540e2934ed35a2c350b31cdc3541ff0811c798d819fcde
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f38ea8ead433e22f77877b3ca1f00f6ad21480c3c534ad1de2aae5a0039976085a6beb7e23ad5e5bf57a526208977edf785c34009c3e51c551a69b27d9f4ff61
+  data.tar.gz: 7c2a62a3e9629b3db6899b9359640c4aecd32f4095a9d6571f1987eda4ee769393bb94de31215540214c49cd57390c28472c47139f465047c8d8788dfa9a94be
data/README.md
CHANGED
@@ -7,7 +7,7 @@
 ## DESCRIPTION:

 Maps values from hashes with different structures and/or key names. Ideal for normalizing arbitrary data to be consumed by your applications, or to prepare your data for different display formats (ie. json).
-
+
 Tiny module that allows you to easily adapt from one hash structure to another with a simple declarative DSL.

 ## FEATURES/PROBLEMS:
@@ -61,7 +61,7 @@ You can use HashMapper in your own little hash-like objects:
 class NiceHash
   include Enumerable
   extend HashMapper
-
+
   map from('/names/first'), to('/first_name')
   map from('/names/last'), to('/last_name')

@@ -89,7 +89,7 @@ end

 #### Coercing values

-You want to make sure an incoming value gets converted to a certain type, so
+You want to make sure an incoming value gets converted to a certain type, so

 ```ruby
 {'one' => '1', 'two' => '2'}
@@ -97,7 +97,7 @@ You want to make sure an incoming value gets converted to a certain type, so

 gets translated to

-```ruby
+```ruby
 {:one => 1, :two => 2}
 ```

@@ -165,9 +165,9 @@ output = NameMapper.normalize(input) # => {:first_name => 'Mark', :last_name =>

 NameMapper.denormalize(output) # => input
 ```
-
+
 This will work with your block filters and even nested mappers (see below).
-
+
 ### Advanced usage
 #### Array access
 You want:
@@ -252,7 +252,7 @@ end

 But HashMapper's nested mappers will actually do that for you if a value is an array, so:

-```ruby
+```ruby
 map from('/employees'), to('employees'), using: UserMapper
 ```
 ... Will map each employee using UserMapper.
@@ -268,12 +268,12 @@ They all yield a block with 2 arguments - the hash you are mapping from and the
 ```ruby
 class EggMapper
   map from('/raw'), to('/fried')
-
+
   before_normalize do |input, output|
-    input['raw'] ||= 'please' # this will give 'raw' a default value
+    input['raw'] ||= 'please' # this will give 'raw' a default value
     input
   end
-
+
   after_denormalize do |input, output|
     output.to_a # the denormalized object will now be an array, not a hash!!
   end
@@ -290,23 +290,23 @@ You can pass one extra argument to before and after filters if you need to:
 ```ruby
 class EggMapper
   map from('/raw'), to('/fried')
-
+
   before_normalize do |input, output, opts|
-    input['raw'] ||= 'please' unless opts[:no_default] # this will give 'raw' a default value
+    input['raw'] ||= 'please' unless opts[:no_default] # this will give 'raw' a default value
     input
   end
-
+
   after_denormalize do |input, output, opts|
     output.to_a # the denormalized object will now be an array, not a hash!!
   end

 end

-EggMapper.normalize({}, no_default: true)
+EggMapper.normalize({}, options: { no_default: true })
 EggMapper.denormalize({fried: 4})
 ```

-
+
 ## REQUIREMENTS:

 ## TODO:
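The last README hunk above changes how filter options are passed: instead of loose trailing arguments, `normalize` now takes them under an explicit `options:` keyword. A minimal sketch of the updated call pattern, reusing the `EggMapper` example from the README trimmed to its before filter (the `extend HashMapper` line and the result comments are assumptions for the sketch, not text from the README):

```ruby
require 'hash_mapper'

class EggMapper
  extend HashMapper

  map from('/raw'), to('/fried')

  before_normalize do |input, output, opts|
    # give 'raw' a default value unless the caller opts out
    input['raw'] ||= 'please' unless opts[:no_default]
    input
  end
end

# 0.2.4 expects filter options under the options: keyword
EggMapper.normalize({}, options: { no_default: true })  # filter sees opts[:no_default] == true
EggMapper.normalize({})                                  # no options given; the default applies
EggMapper.denormalize({ fried: 4 })
```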
data/hash_mapper.gemspec
CHANGED
@@ -8,7 +8,7 @@ Gem::Specification.new do |s|
   s.authors = ['Ismael Celis']
   s.description = %q{Tiny module that allows you to easily adapt from one hash structure to another with a simple declarative DSL.}
   s.email = %q{ismaelct@gmail.com}
-
+
   s.files = `git ls-files`.split("\n")
   s.homepage = %q{http://github.com/ismasan/hash_mapper}
   s.rdoc_options = ['--charset=UTF-8']
@@ -17,14 +17,14 @@ Gem::Specification.new do |s|
   s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
   s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
   s.require_paths = ['lib']
-
+
   if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
     s.add_runtime_dependency("activesupport", ">= 4")
   else
     s.add_dependency("activesupport", ">= 4")
   end
-
+
   # specify any dependencies here; for example:
-  s.add_development_dependency 'rspec'
+  s.add_development_dependency 'rspec', '>= 3.9'
   s.add_development_dependency 'rake'
 end
data/lib/hash_mapper.rb
CHANGED
@@ -39,6 +39,7 @@ unless [].respond_to?(:inject_with_index)
 end

 module HashMapper
+  DEFAULT_OPTIONS = {}.freeze

   def self.extended(base)
     base.class_eval do
@@ -73,12 +74,12 @@ module HashMapper
     { using: mapper_class }
   end

-  def normalize(a_hash,
-    perform_hash_mapping a_hash, :normalize,
+  def normalize(a_hash, options: DEFAULT_OPTIONS, context: nil)
+    perform_hash_mapping a_hash, :normalize, options: options, context: context
   end

-  def denormalize(a_hash,
-    perform_hash_mapping a_hash, :denormalize,
+  def denormalize(a_hash, options: DEFAULT_OPTIONS, context: nil)
+    perform_hash_mapping a_hash, :denormalize, options: options, context: context
   end

   def before_normalize(&blk)
@@ -99,22 +100,22 @@ module HashMapper

   protected

-  def perform_hash_mapping(a_hash, meth,
+  def perform_hash_mapping(a_hash, meth, options:, context:)
     output = {}

     # Before filters
     a_hash = self.send(:"before_#{meth}_filters").inject(a_hash) do |memo, filter|
-      filter.call(memo, output,
+      filter.call(memo, output, options)
     end

     # Do the mapping
     self.maps.each do |m|
-      m.process_into(output, a_hash, meth)
+      m.process_into(output, a_hash, method_name: meth, context: context)
     end

     # After filters
     self.send(:"after_#{meth}_filters").inject(output) do |memo, filter|
-      filter.call(a_hash, memo,
+      filter.call(a_hash, memo, options)
     end
   end

@@ -132,14 +133,14 @@ module HashMapper
       @default_value = options.fetch(:default, :hash_mapper_no_default)
     end

-    def process_into(output, input,
-      path_1, path_2 = (
-      value = get_value_from_input(output, input, path_1,
+    def process_into(output, input, method_name: :normalize, context: nil)
+      path_1, path_2 = (method_name == :normalize ? [path_from, path_to] : [path_to, path_from])
+      value = get_value_from_input(output, input, path_1, method_name: method_name, context: context)
       set_value_in_output(output, path_2, value)
     end
     protected

-    def get_value_from_input(output, input, path,
+    def get_value_from_input(output, input, path, method_name:, context:)
       value = path.inject(input) do |h,e|
         if h.is_a?(Hash)
           v = [h[e.to_sym], h[e.to_s]].compact.first
@@ -149,7 +150,7 @@ module HashMapper
         return :hash_mapper_no_value if v.nil?
         v
       end
-      delegated_mapper ? delegate_to_nested_mapper(value,
+      delegated_mapper ? delegate_to_nested_mapper(value, method_name, context: context) : value
     end

     def set_value_in_output(output, path, value)
@@ -163,14 +164,14 @@ module HashMapper
       add_value_to_hash!(output, path, value)
     end

-    def delegate_to_nested_mapper(value,
+    def delegate_to_nested_mapper(value, method_name, context:)
       case value
       when Array
-        value.map {|
+        value.map {|v| delegated_mapper.public_send(method_name, v, context: context)}
       when nil
         return :hash_mapper_no_value
       else
-        delegated_mapper.
+        delegated_mapper.public_send(method_name, value, context: context)
       end
     end

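The hunks above are the core API change in 0.2.4: `normalize` and `denormalize` now take keyword arguments (`options:`, defaulting to the new frozen `DEFAULT_OPTIONS`, and `context:`, defaulting to `nil`), and the context is threaded through `perform_hash_mapping` and `Map#process_into` into any nested mapper via `public_send`. A minimal calling sketch, using a hypothetical `PersonMapper` built with the DSL shown in the README (the mapper name and result comments are illustrative, not taken from the gem):

```ruby
require 'hash_mapper'

# Hypothetical mapper, only to illustrate the 0.2.4 keyword-argument API.
class PersonMapper
  extend HashMapper

  map from('/names/first'), to('/first_name')
end

input = { names: { first: 'Ismael' } }

PersonMapper.normalize(input)                              # both keywords are optional
PersonMapper.normalize(input, options: {}, context: {})    # options reach before/after filters,
                                                            # context is handed to nested mappers
PersonMapper.denormalize(first_name: 'Ismael')              # reverse mapping takes the same keywords
```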
data/lib/hash_mapper/version.rb
CHANGED
data/spec/hash_mapper_spec.rb
CHANGED
@@ -6,24 +6,24 @@ class OneLevel
 end

 describe 'mapping a hash with one level' do
-
+
   before :each do
-    @from = {:
-    @to = {:
+    @from = {name: 'ismael'}
+    @to = {nombre: 'ismael'}
   end
-
+
   it "should map to" do
-    OneLevel.normalize(@from).
+    expect(OneLevel.normalize(@from)).to eq(@to)
   end
-
+
   it "should have indifferent access" do
-    OneLevel.normalize({'name' => 'ismael'}).
+    expect(OneLevel.normalize({'name' => 'ismael'})).to eq(@to)
   end
-
+
   it "should map back the other way" do
-    OneLevel.denormalize(@to).
+    expect(OneLevel.denormalize(@to)).to eq(@from)
   end
-
+
 end

 class ManyLevels
@@ -35,35 +35,35 @@ class ManyLevels
 end

 describe 'mapping from one nested hash to another' do
-
+
   before :each do
     @from = {
-      :
-      :
-      :
-      :
-      :
+      name: 'ismael',
+      tagid: 1,
+      properties: {
+        type: 'BLAH',
+        egg: 33
       }
     }
-
+
     @to = {
-      :
-      :
-      :
-      :
-      :
+      tag_id: 1,
+      chicken: 33,
+      tag_attributes: {
+        name: 'ismael',
+        type: 'BLAH'
       }
     }
   end
-
+
   it "should map from and to different depths" do
-    ManyLevels.normalize(@from).
+    expect(ManyLevels.normalize(@from)).to eq(@to)
   end
-
+
   it "should map back the other way" do
-    ManyLevels.denormalize(@to).
+    expect(ManyLevels.denormalize(@to)).to eq(@from)
   end
-
+
 end

 class DifferentTypes
@@ -73,53 +73,53 @@ class DifferentTypes
 end

 describe 'coercing types' do
-
+
   before :each do
     @from = {
-      :
-      :
+      strings: {a: '10'},
+      integers: {b: 20}
     }
-
+
     @to = {
-      :
-      :
+      integers: {a: 10},
+      strings: {b: '20'}
     }
   end
-
+
   it "should coerce values to specified types" do
-    DifferentTypes.normalize(@from).
+    expect(DifferentTypes.normalize(@from)).to eq(@to)
   end
-
+
   it "should coerce the other way if specified" do
-    DifferentTypes.denormalize(@to).
+    expect(DifferentTypes.denormalize(@to)).to eq(@from)
   end
-
+
 end


 describe 'arrays in hashes' do
   before :each do
     @from = {
-      :
-      :
-      :
-      :
-      :
+      name: ['ismael','sachiyo'],
+      tagid: 1,
+      properties: {
+        type: 'BLAH',
+        egg: 33
       }
     }
-
+
     @to = {
-      :
-      :
-      :
-      :
-      :
+      tag_id: 1,
+      chicken: 33,
+      tag_attributes: {
+        name: ['ismael','sachiyo'],
+        type: 'BLAH'
      }
     }
   end
-
+
   it "should map array values as normal" do
-    ManyLevels.normalize(@from).
+    expect(ManyLevels.normalize(@from)).to eq(@to)
   end
 end

@@ -129,34 +129,34 @@ class WithArrays
   map from('/arrays/names[1]'), to('/last_name')
   map from('/arrays/company'), to('/work/company')
 end
-
+
 describe "array indexes" do
   before :each do
     @from = {
-      :
-      :
-      :
+      arrays: {
+        names: ['ismael','celis'],
+        company: 'New Bamboo'
      }
     }
-    @to ={
-      :
-      :
-      :
+    @to = {
+      first_name: 'ismael',
+      last_name: 'celis',
+      work: {company: 'New Bamboo'}
    }
   end
-
+
   it "should extract defined array values" do
-    WithArrays.normalize(@from).
+    expect(WithArrays.normalize(@from)).to eq(@to)
   end
-
+
   it "should map the other way restoring arrays" do
-    WithArrays.denormalize(@to).
+    expect(WithArrays.denormalize(@to)).to eq(@from)
   end
 end

 class PersonWithBlock
   extend HashMapper
-  def self.normalize(
+  def self.normalize(*_)
     super
   end
   map from('/names/first'){|n| n.gsub('+','')}, to('/first_name'){|n| "+++#{n}+++"}
@@ -166,64 +166,64 @@ class PersonWithBlockOneWay
   map from('/names/first'), to('/first_name') do |n| "+++#{n}+++" end
 end

-describe "with
+describe "with block filters" do
   before :each do
     @from = {
-      :
+      names: {first: 'Ismael'}
     }
     @to = {
-      :
+      first_name: '+++Ismael+++'
     }
   end
-
+
   it "should pass final value through given block" do
-    PersonWithBlock.normalize(@from).
+    expect(PersonWithBlock.normalize(@from)).to eq(@to)
   end
-
+
   it "should be able to map the other way using a block" do
-    PersonWithBlock.denormalize(@to).
+    expect(PersonWithBlock.denormalize(@to)).to eq(@from)
   end
-
+
   it "should accept a block for just one direction" do
-    PersonWithBlockOneWay.normalize(@from).
+    expect(PersonWithBlockOneWay.normalize(@from)).to eq(@to)
   end
-
+
 end

 class ProjectMapper
   extend HashMapper
-
+
   map from('/name'), to('/project_name')
-  map from('/author_hash'), to('/author'), using
+  map from('/author_hash'), to('/author'), using: PersonWithBlock
 end

 describe "with nested mapper" do
   before :each do
     @from ={
-      :
-      :
-      :
+      name: 'HashMapper',
+      author_hash: {
+        names: {first: 'Ismael'}
       }
     }
     @to = {
-      :
-      :
+      project_name: 'HashMapper',
+      author: {first_name: '+++Ismael+++'}
    }
   end
-
+
   it "should delegate nested hashes to another mapper" do
-    ProjectMapper.normalize(@from).
+    expect(ProjectMapper.normalize(@from)).to eq(@to)
   end
-
+
   it "should translate the other way using nested hashes" do
-    ProjectMapper.denormalize(@to).
+    expect(ProjectMapper.denormalize(@to)).to eq(@from)
   end
-
+
 end

 class CompanyMapper
   extend HashMapper
-
+
   map from('/name'), to('/company_name')
   map from('/employees'), to('/employees') do |employees_array|
     employees_array.collect{|emp_hash| PersonWithBlock.normalize(emp_hash)}
@@ -232,82 +232,82 @@ end

 class CompanyEmployeesMapper
   extend HashMapper
-
+
   map from('/name'), to('/company_name')
-  map from('/employees'), to('/employees'), using
+  map from('/employees'), to('/employees'), using: PersonWithBlock
 end

 describe "with arrays of nested hashes" do
   before :each do
     @from = {
-      :
-      :
-        {:
-        {:
-        {:
+      name: 'New Bamboo',
+      employees: [
+        {names: {first: 'Ismael'}},
+        {names: {first: 'Sachiyo'}},
+        {names: {first: 'Pedro'}}
       ]
     }
     @to = {
-      :
-      :
-        {:
-        {:
-        {:
+      company_name: 'New Bamboo',
+      employees: [
+        {first_name: '+++Ismael+++'},
+        {first_name: '+++Sachiyo+++'},
+        {first_name: '+++Pedro+++'}
       ]
     }
   end
-
+
   it "should pass array value though given block mapper" do
-    CompanyMapper.normalize(@from).
+    expect(CompanyMapper.normalize(@from)).to eq(@to)
   end
-
+
   it "should map array elements automatically" do
-    CompanyEmployeesMapper.normalize(@from).
+    expect(CompanyEmployeesMapper.normalize(@from)).to eq(@to)
   end
 end

 class NoKeys
   extend HashMapper
-
+
   map from('/exists'), to('/exists_yahoo') #in
   map from('/exists_as_nil'), to('/exists_nil') #in
   map from('/foo'), to('/bar') # not in
-
+
 end

 describe "with non-matching maps" do
   before :all do
     @input = {
-      :
-      :
-      :
+      exists: 1,
+      exists_as_nil: nil,
+      doesnt_exist: 2
    }
     @output = {
-      :
+      exists_yahoo: 1
    }
   end
-
+
   it "should ignore maps that don't exist" do
-    NoKeys.normalize(@input).
+    expect(NoKeys.normalize(@input)).to eq(@output)
   end
 end

 describe "with false values" do
-
+
   it "should include values in output" do
-    NoKeys.normalize({'exists' => false}).
-    NoKeys.normalize({:
+    expect(NoKeys.normalize({'exists' => false})).to eq({exists_yahoo: false})
+    expect(NoKeys.normalize({exists: false})).to eq({exists_yahoo: false})
   end
-
+
 end

 describe "with nil values" do
-
+
   it "should not include values in output" do
-    NoKeys.normalize({:
-    NoKeys.normalize({'exists' => nil}).
+    expect(NoKeys.normalize({exists: nil})).to eq({})
+    expect(NoKeys.normalize({'exists' => nil})).to eq({})
   end
-
+
 end

 class WithBeforeFilters
@@ -341,22 +341,22 @@ end

 describe "before and after filters" do
   before(:all) do
-    @denorm = {:
-    @norm = {:
+    @denorm = {hello: 'wassup?!'}
+    @norm = {goodbye: 'seeya later!'}
   end
+
   it "should allow filtering before normalize" do
-    WithBeforeFilters.normalize(@denorm).
+    expect(WithBeforeFilters.normalize(@denorm)).to eq({goodbye: 'wassup?!', extra: 'extra wassup?! innit'})
   end
   it "should allow filtering before denormalize" do
-    WithBeforeFilters.denormalize(@norm).
+    expect(WithBeforeFilters.denormalize(@norm)).to eq({hello: 'changed'})
   end
   it "should allow filtering after normalize" do
-    WithAfterFilters.normalize(@denorm).
+    expect(WithAfterFilters.normalize(@denorm)).to eq([[:goodbye, 'wassup?!']])
   end
   it "should allow filtering after denormalize" do
-    WithAfterFilters.denormalize(@norm).
+    expect(WithAfterFilters.denormalize(@norm)).to eq({})
   end
-
 end

 class NotRelated
@@ -380,23 +380,23 @@ end
 describe "inherited mappers" do
   before :all do
     @from = {
-      :
-      :
-      :
+      a: 'a',
+      b: 'b',
+      c: 'c'
     }
     @to_b ={
-      :
-      :
+      a: {a: 'a'},
+      b: {b: 'b'}
     }

   end
-
+
   it "should inherit mappings" do
-    B.normalize(@from).
+    expect(B.normalize(@from)).to eq(@to_b)
   end
-
+
   it "should not affect other mappers" do
-    NotRelated.normalize('n' => 'nn').
+    expect(NotRelated.normalize('n' => 'nn')).to eq({n: {n: 'nn'}})
   end
 end

@@ -407,23 +407,21 @@ class MixedMappings
 end

 describe "dealing with strings and symbols" do
-
+
   it "should be able to normalize from a nested hash with string keys" do
-    MixedMappings.normalize(
+    expect(MixedMappings.normalize(
       'big' => {'jobs' => 5},
       'timble' => 3.2
-    ).
-      :bingo => {:biscuit => 3.2}}
+    )).to eq({dodo: 5, bingo: {biscuit: 3.2}})
   end
-
+
   it "should not symbolized keys in value hashes" do
-    MixedMappings.normalize(
+    expect(MixedMappings.normalize(
       'big' => {'jobs' => 5},
       'timble' => {'string key' => 'value'}
-    ).
-      :bingo => {:biscuit => {'string key' => 'value'}}}
+    )).to eq({dodo: 5, bingo: {biscuit: {'string key' => 'value'}}})
   end
-
+
 end

 class DefaultValues
@@ -435,16 +433,16 @@ end

 describe "default values" do
   it "should use a default value whenever a key is not set" do
-    DefaultValues.normalize(
+    expect(DefaultValues.normalize(
       'without_default' => 'some_value'
-    ).
+    )).to eq({ not_defaulted: 'some_value', defaulted: 'the_default_value' })
   end

   it "should not use a default if a key is set (even if the value is falsy)" do
-    DefaultValues.normalize({
+    expect(DefaultValues.normalize({
       'without_default' => 'some_value',
       'with_default' => false
-    }).
+    })).to eq({ not_defaulted: 'some_value', defaulted: false })
   end
 end

@@ -488,20 +486,20 @@ end

 describe 'multiple before filters' do
   it 'runs before_normalize filters in the order they are defined' do
-    MultiBeforeFilter.normalize({ foo: 'X' }).
+    expect(MultiBeforeFilter.normalize({ foo: 'X' })).to eq({ bar: 'XYZ' })
   end

   it 'runs before_denormalize filters in the order they are defined' do
-    MultiBeforeFilter.denormalize({ bar: 'X' }).
+    expect(MultiBeforeFilter.denormalize({ bar: 'X' })).to eq({ foo: 'BAX' })
   end

   context 'when the filters are spread across classes' do
     it 'runs before_normalize filters in the order they are defined' do
-      MultiBeforeFilterSubclass.normalize({ foo: 'X' }).
+      expect(MultiBeforeFilterSubclass.normalize({ foo: 'X' })).to eq({ bar: 'XYZ!' })
     end

     it 'runs before_denormalize filters in the order they are defined' do
-      MultiBeforeFilterSubclass.denormalize({ bar: 'X' }).
+      expect(MultiBeforeFilterSubclass.denormalize({ bar: 'X' })).to eq({ foo: 'CBAX' })
     end
   end
 end
@@ -546,20 +544,20 @@ end

 describe 'multiple after filters' do
   it 'runs after_normalize filters in the order they are defined' do
-    MultiAfterFilter.normalize({ baz: '0' }).
+    expect(MultiAfterFilter.normalize({ baz: '0' })).to eq({ bat: '012' })
   end

   it 'runs after_denormalize filters in the order they are defined' do
-    MultiAfterFilter.denormalize({ bat: '0' }).
+    expect(MultiAfterFilter.denormalize({ bat: '0' })).to eq({ baz: '890' })
   end

   context 'when the filters are spread across classes' do
     it 'runs after_normalize filters in the order they are defined' do
-      MultiAfterFilterSubclass.normalize({ baz: '0' }).
+      expect(MultiAfterFilterSubclass.normalize({ baz: '0' })).to eq({ bat: '0123' })
     end

     it 'runs after_denormalize filters in the order they are defined' do
-      MultiAfterFilterSubclass.denormalize({ bat: '0' }).
+      expect(MultiAfterFilterSubclass.denormalize({ bat: '0' })).to eq({ baz: '7890' })
     end
   end
 end
@@ -591,15 +589,42 @@ end
 describe 'with options' do
   context 'when called with options' do
     it 'passes the options to all the filters' do
-      WithOptions.normalize({}, bn: 1, an: 2).
-      WithOptions.denormalize({}, bdn: 1, adn: 2).
+      expect(WithOptions.normalize({}, options: { bn: 1, an: 2 })).to eq({bn: 1, an: 2})
+      expect(WithOptions.denormalize({}, options: { bdn: 1, adn: 2 })).to eq({bdn: 1, adn: 2})
     end
   end

   context 'when called without options' do
     it 'stills work' do
-      WithOptions.normalize({}).
-      WithOptions.denormalize({}).
+      expect(WithOptions.normalize({})).to eq({})
+      expect(WithOptions.denormalize({})).to eq({})
+    end
+  end
+end
+
+describe 'passing custom context object' do
+  it 'passes context object down to sub-mappers' do
+    friend_mapper = Class.new do
+      extend HashMapper
+
+      map from('/name'), to('/name')
+
+      def normalize(input, context: , **kargs)
+        context[:names] ||= []
+        context[:names] << input[:name]
+        self.class.normalize(input, context: context, **kargs)
+      end
     end
+
+    mapper = Class.new do
+      extend HashMapper
+
+      map from('/friends'), to('/friends'), using: friend_mapper.new
+    end
+
+    input = {friends: [{name: 'Ismael', last_name: 'Celis'}, {name: 'Joe'}]}
+    ctx = {}
+    mapper.normalize(input, context: ctx)
+    expect(ctx[:names]).to eq(%w(Ismael Joe))
   end
 end
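The new 'passing custom context object' spec above is the behavioural contract for the `context:` keyword: the same object is handed to a nested mapper (here a mapper instance given to `using:`) once per array element. Condensed outside RSpec, the same pattern looks like the sketch below; the class names are illustrative, but the logic mirrors the spec.

```ruby
require 'hash_mapper'

class FriendMapper
  extend HashMapper

  map from('/name'), to('/name')

  # Instance-level hook: record every mapped name in the shared context,
  # then delegate the actual mapping to the class-level DSL.
  def normalize(input, context:, **kargs)
    context[:names] ||= []
    context[:names] << input[:name]
    self.class.normalize(input, context: context, **kargs)
  end
end

class GroupMapper
  extend HashMapper

  # Passing an *instance* to using: lets that instance intercept each element.
  map from('/friends'), to('/friends'), using: FriendMapper.new
end

ctx = {}
GroupMapper.normalize({ friends: [{ name: 'Ismael' }, { name: 'Joe' }] }, context: ctx)
ctx[:names] # => ["Ismael", "Joe"]
```

Because `delegate_to_nested_mapper` calls `public_send(method_name, value, context: context)` on whatever was passed to `using:`, an instance with its own `normalize` can observe every element while still delegating the mapping itself to the class-level DSL.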
data/spec/spec_helper.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: hash_mapper
 version: !ruby/object:Gem::Version
-  version: 0.2.3
+  version: 0.2.4
 platform: ruby
 authors:
 - Ismael Celis
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2020-09-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
@@ -30,14 +30,14 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '
+        version: '3.9'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '
+        version: '3.9'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -72,7 +72,7 @@ files:
 homepage: http://github.com/ismasan/hash_mapper
 licenses: []
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options:
 - "--charset=UTF-8"
 require_paths:
@@ -88,9 +88,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-
-
-signing_key:
+rubygems_version: 3.0.3
+signing_key:
 specification_version: 4
 summary: Maps input hashes to a normalized format
 test_files: