hocon 0.9.5 → 1.0.1
- checksums.yaml +4 -4
- data/CHANGELOG.md +14 -2
- data/README.md +22 -10
- data/lib/hocon.rb +9 -3
- data/lib/hocon/config_factory.rb +4 -0
- data/lib/hocon/config_value_factory.rb +13 -2
- data/lib/hocon/impl/config_reference.rb +5 -2
- data/lib/hocon/impl/simple_config_origin.rb +1 -1
- data/spec/fixtures/parse_render/example1/input.conf +21 -0
- data/spec/fixtures/parse_render/example1/output.conf +26 -0
- data/spec/fixtures/parse_render/example1/output_nocomments.conf +17 -0
- data/spec/fixtures/parse_render/example2/input.conf +10 -0
- data/spec/fixtures/parse_render/example2/output.conf +17 -0
- data/spec/fixtures/parse_render/example2/output_nocomments.conf +17 -0
- data/spec/fixtures/parse_render/example3/input.conf +2 -0
- data/spec/fixtures/parse_render/example3/output.conf +2 -0
- data/spec/fixtures/parse_render/example4/input.json +6 -0
- data/spec/fixtures/parse_render/example4/output.conf +6 -0
- data/spec/fixtures/test_utils/resources/bom.conf +2 -0
- data/spec/fixtures/test_utils/resources/cycle.conf +1 -0
- data/spec/fixtures/test_utils/resources/file-include.conf +5 -0
- data/spec/fixtures/test_utils/resources/include-from-list.conf +4 -0
- data/spec/fixtures/test_utils/resources/subdir/bar.conf +1 -0
- data/spec/fixtures/test_utils/resources/subdir/baz.conf +1 -0
- data/spec/fixtures/test_utils/resources/subdir/foo.conf +5 -0
- data/spec/fixtures/test_utils/resources/test01.conf +80 -0
- data/spec/fixtures/test_utils/resources/test01.json +4 -0
- data/spec/fixtures/test_utils/resources/test03.conf +36 -0
- data/spec/spec_helper.rb +43 -0
- data/spec/test_utils.rb +757 -0
- data/spec/unit/typesafe/config/concatenation_spec.rb +417 -0
- data/spec/unit/typesafe/config/conf_parser_spec.rb +822 -0
- data/spec/unit/typesafe/config/config_document_parser_spec.rb +494 -0
- data/spec/unit/typesafe/config/config_document_spec.rb +576 -0
- data/spec/unit/typesafe/config/config_factory_spec.rb +120 -0
- data/spec/unit/typesafe/config/config_node_spec.rb +552 -0
- data/spec/unit/typesafe/config/config_value_factory_spec.rb +85 -0
- data/spec/unit/typesafe/config/config_value_spec.rb +935 -0
- data/spec/unit/typesafe/config/hocon_spec.rb +54 -0
- data/spec/unit/typesafe/config/path_spec.rb +261 -0
- data/spec/unit/typesafe/config/public_api_spec.rb +520 -0
- data/spec/unit/typesafe/config/simple_config_spec.rb +112 -0
- data/spec/unit/typesafe/config/token_spec.rb +188 -0
- data/spec/unit/typesafe/config/tokenizer_spec.rb +801 -0
- metadata +39 -3
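(Not part of the diff.) The spec files listed above exercise the gem's public parsing entry points; a minimal sketch of that API, using only calls that appear verbatim in spec/test_utils.rb further down:

    require 'hocon'

    # Same parse options that TestUtils.parse_config builds below.
    options = Hocon::ConfigParseOptions.defaults.
        set_origin_description("example").
        set_syntax(Hocon::ConfigSyntax::CONF)

    conf = Hocon::ConfigFactory.parse_string("foo { bar = 42 }", options)
    conf.root  # => the parsed ConfigObject, as used by TestUtils.parse_object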
data/spec/fixtures/test_utils/resources/test03.conf
ADDED
@@ -0,0 +1,36 @@
{
    "test01" : {
        "ints" : 12,
        include "test01",
        "booleans" : 42
    },

    "test02" : {
        include

            "test02.conf"
    },

    "equiv01" : {
        include "equiv01/original.json"
    },

    # missing includes are supposed to be silently ignored
    nonexistent {
        include "nothere"
        include "nothere.conf"
        include "nothere.json"
        include "nothere.properties"
    }

    # make sure included file substitutions fall back to parent file,
    # both when the include is at the root (so doesn't need to have
    # substitutions adjusted) and when it is not.
    foo="This is in the including file"
    bar="This is in the including file"
    include "test03-included.conf"

    subtree {
        include "test03-included.conf"
    }
}
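(Not part of the diff.) This fixture exercises HOCON include handling: missing includes are silently ignored, and substitutions in an included file fall back to values such as foo and bar defined in the including file. A hedged sketch of loading it; Hocon::ConfigFactory.parse_file is assumed here, since only parse_string appears verbatim in this diff:

    require 'hocon'

    # Assumption: parse_file resolves the nested includes relative to the file's location.
    conf = Hocon::ConfigFactory.parse_file("spec/fixtures/test_utils/resources/test03.conf")
    conf.root  # => merged ConfigObject with the included files folded in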
data/spec/spec_helper.rb
ADDED
@@ -0,0 +1,43 @@
# encoding: utf-8

FIXTURE_DIR = File.join(dir = File.expand_path(File.dirname(__FILE__)), "fixtures")

EXAMPLE1 = { :hash =>
               {"foo" => {
                   "bar" => {
                       "baz" => 42,
                       "abracadabra" => "hi",
                       "yahoo" => "yippee",
                       "boom" => [1, 2, {"derp" => "duh"}, 4],
                       "empty" => [],
                       "truthy" => true,
                       "falsy" => false
                   }}},
             :name => "example1",
}

EXAMPLE2 = { :hash =>
               {"jruby-puppet"=> {
                   "jruby-pools" => [{"environment" => "production"}],
                   "load-path" => ["/usr/lib/ruby/site_ruby/1.8", "/usr/lib/ruby/site_ruby/1.8"],
                   "master-conf-dir" => "/etc/puppet",
                   "master-var-dir" => "/var/lib/puppet",
                },
                "webserver" => {"host" => "1.2.3.4"}},
             :name => "example2",
}

EXAMPLE3 = { :hash =>
               {"a" => true,
                "b" => true},
             :name => "example3",
}

EXAMPLE4 = { :hash =>
               {"kermit" => "frog",
                "miss" => "piggy",
                "bert" => "ernie",
                "janice" => "guitar"},
             :name => "example4",
}
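(Not part of the diff.) Each EXAMPLE* constant pairs a parse_render fixture directory with the Ruby hash its input is expected to parse to; note example4 uses input.json rather than input.conf. A sketch of the intended round trip, assuming Hocon.load returns the parsed file as plain Ruby data:

    require 'hocon'
    require_relative 'spec_helper'

    input = File.join(FIXTURE_DIR, "parse_render", EXAMPLE1[:name], "input.conf")
    # Assumption: Hocon.load parses the file and returns a plain Hash.
    Hocon.load(input) == EXAMPLE1[:hash]  # => expected to hold in the specs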
data/spec/test_utils.rb
ADDED
@@ -0,0 +1,757 @@
# encoding: utf-8

require 'hocon'
require 'spec_helper'
require 'rspec'
require 'hocon/impl/config_reference'
require 'hocon/impl/substitution_expression'
require 'hocon/impl/path_parser'
require 'hocon/impl/config_impl_util'
require 'hocon/impl/config_node_simple_value'
require 'hocon/impl/config_node_single_token'
require 'hocon/impl/config_node_object'
require 'hocon/impl/config_node_array'
require 'hocon/impl/config_node_concatenation'

module TestUtils
  Tokens = Hocon::Impl::Tokens
  ConfigInt = Hocon::Impl::ConfigInt
  ConfigDouble = Hocon::Impl::ConfigDouble
  ConfigString = Hocon::Impl::ConfigString
  ConfigNull = Hocon::Impl::ConfigNull
  ConfigBoolean = Hocon::Impl::ConfigBoolean
  ConfigReference = Hocon::Impl::ConfigReference
  SubstitutionExpression = Hocon::Impl::SubstitutionExpression
  ConfigConcatenation = Hocon::Impl::ConfigConcatenation
  Path = Hocon::Impl::Path
  EOF = Hocon::Impl::TokenType::EOF

  include RSpec::Matchers

  def self.intercept(exception_type, & block)
    thrown = nil
    result = nil
    begin
      result = block.call
    rescue => e
      if e.is_a?(exception_type)
        thrown = e
      else
        raise "Expected exception #{exception_type} was not thrown, got #{e.class}: #{e}\n#{e.backtrace.join("\n")}"
      end
    end
    if thrown.nil?
      raise "Expected exception #{exception_type} was not thrown, no exception was thrown and got result #{result}"
    end
    thrown
  end

  class ParseTest

    def self.from_s(test)
      ParseTest.new(false, false, test)
    end

    def self.from_pair(lift_behavior_unexpected, test)
      ParseTest.new(lift_behavior_unexpected, false, test)
    end

    def initialize(lift_behavior_unexpected, whitespace_matters, test)
      @lift_behavior_unexpected = lift_behavior_unexpected
      @whitespace_matters = whitespace_matters
      @test = test
    end
    attr_reader :test

    def lift_behavior_unexpected?
      @lift_behavior_unexpected
    end

    def whitespace_matters?
      @whitespace_matters
    end
  end


  # note: it's important to put {} or [] at the root if you
  # want to test "invalidity reasons" other than "wrong root"
  InvalidJsonInvalidConf = [
    ParseTest.from_s("{"),
    ParseTest.from_s("}"),
    ParseTest.from_s("["),
    ParseTest.from_s("]"),
    ParseTest.from_s(","),
    ParseTest.from_pair(true, "10"), # value not in array or object, lift-json now allows this
    ParseTest.from_pair(true, "\"foo\""), # value not in array or object, lift-json allows it
    ParseTest.from_s(")\""), # single quote by itself
    ParseTest.from_pair(true, "[,]"), # array with just a comma in it; lift is OK with this
    ParseTest.from_pair(true, "[,,]"), # array with just two commas in it; lift is cool with this too
    ParseTest.from_pair(true, "[1,2,,]"), # array with two trailing commas
    ParseTest.from_pair(true, "[,1,2]"), # array with initial comma
    ParseTest.from_pair(true, "{ , }"), # object with just a comma in it
    ParseTest.from_pair(true, "{ , , }"), # object with just two commas in it
    ParseTest.from_s("{ 1,2 }"), # object with single values not key-value pair
    ParseTest.from_pair(true, '{ , "foo" : 10 }'), # object starts with comma
    ParseTest.from_pair(true, "{ \"foo\" : 10 ,, }"), # object has two trailing commas
    ParseTest.from_s(") \"a\" : 10 ,, "), # two trailing commas for braceless root object
    ParseTest.from_s("{ \"foo\" : }"), # no value in object
    ParseTest.from_s("{ : 10 }"), # no key in object
    ParseTest.from_pair(true, " \"foo\" : "), # no value in object with no braces; lift-json thinks this is acceptable
    ParseTest.from_pair(true, " : 10 "), # no key in object with no braces; lift-json is cool with this too
    ParseTest.from_s(') "foo" : 10 } '), # close brace but no open
    ParseTest.from_s(") \"foo\" : 10 } "), # close brace but no open
    ParseTest.from_s(") \"foo\" : 10 [ "), # no-braces object with trailing gunk
    ParseTest.from_s("{ \"foo\" }"), # no value or colon
    ParseTest.from_s("{ \"a\" : [ }"), # [ is not a valid value
    ParseTest.from_s("{ \"foo\" : 10, true }"), # non-key after comma
    ParseTest.from_s("{ foo \n bar : 10 }"), # newline in the middle of the unquoted key
    ParseTest.from_s("[ 1, \\"), # ends with backslash
    # these two problems are ignored by the lift tokenizer
    ParseTest.from_s("[:\"foo\", \"bar\"]"), # colon in an array; lift doesn't throw (tokenizer erases it)
    ParseTest.from_s("[\"foo\" : \"bar\"]"), # colon in an array another way, lift ignores (tokenizer erases it)
    ParseTest.from_s("[ \"hello ]"), # unterminated string
    ParseTest.from_pair(true, "{ \"foo\" , true }"), # comma instead of colon, lift is fine with this
    ParseTest.from_pair(true, "{ \"foo\" : true \"bar\" : false }"), # missing comma between fields, lift fine with this
    ParseTest.from_s("[ 10, }]"), # array with } as an element
    ParseTest.from_s("[ 10, {]"), # array with { as an element
    ParseTest.from_s("{}x"), # trailing invalid token after the root object
    ParseTest.from_s("[]x"), # trailing invalid token after the root array
    ParseTest.from_pair(true, "{}{}"), # trailing token after the root object - lift OK with it
    ParseTest.from_pair(true, "{}true"), # trailing token after the root object; lift ignores the {}
    ParseTest.from_pair(true, "[]{}"), # trailing valid token after the root array
    ParseTest.from_pair(true, "[]true"), # trailing valid token after the root array, lift ignores the []
    ParseTest.from_s("[${]"), # unclosed substitution
    ParseTest.from_s("[$]"), # '$' by itself
    ParseTest.from_s("[$ ]"), # '$' by itself with spaces after
    ParseTest.from_s("[${}]"), # empty substitution (no path)
    ParseTest.from_s("[${?}]"), # no path with ? substitution
    ParseTest.new(false, true, "[${ ?foo}]"), # space before ? not allowed
    ParseTest.from_s(%q|{ "a" : [1,2], "b" : y${a}z }|), # trying to interpolate an array in a string
    ParseTest.from_s(%q|{ "a" : { "c" : 2 }, "b" : y${a}z }|), # trying to interpolate an object in a string
    ParseTest.from_s(%q|{ "a" : ${a} }|), # simple cycle
    ParseTest.from_s(%q|[ { "a" : 2, "b" : ${${a}} } ]|), # nested substitution
    ParseTest.from_s("[ = ]"), # = is not a valid token in unquoted text
    ParseTest.from_s("[ + ]"),
    ParseTest.from_s("[ # ]"),
    ParseTest.from_s("[ ` ]"),
    ParseTest.from_s("[ ^ ]"),
    ParseTest.from_s("[ ? ]"),
    ParseTest.from_s("[ ! ]"),
    ParseTest.from_s("[ @ ]"),
    ParseTest.from_s("[ * ]"),
    ParseTest.from_s("[ & ]"),
    ParseTest.from_s("[ \\ ]"),
    ParseTest.from_s("+="),
    ParseTest.from_s("[ += ]"),
    ParseTest.from_s("+= 10"),
    ParseTest.from_s("10 +="),
    ParseTest.from_s("[ 10e+3e ]"), # "+" not allowed in unquoted strings, and not a valid number
    ParseTest.from_pair(true, "[ \"foo\nbar\" ]"), # unescaped newline in quoted string, lift doesn't care
    ParseTest.from_s("[ # comment ]"),
    ParseTest.from_s("${ #comment }"),
    ParseTest.from_s("[ // comment ]"),
    ParseTest.from_s("${ // comment }"),
    # ParseTest.from_s("{ include \"bar\" : 10 }"), # include with a value after it
    ParseTest.from_s("{ include foo }"), # include with unquoted string
    ParseTest.from_s("{ include : { \"a\" : 1 } }"), # include used as unquoted key
    ParseTest.from_s("a="), # no value
    ParseTest.from_s("a:"), # no value with colon
    ParseTest.from_s("a= "), # no value with whitespace after
    ParseTest.from_s("a.b="), # no value with path
    ParseTest.from_s("{ a= }"), # no value inside braces
    ParseTest.from_s("{ a: }") # no value with colon inside braces
  ]

  # We'll automatically try each of these with whitespace modifications
  # so no need to add every possible whitespace variation
  ValidJson = [
    ParseTest.from_s("{}"),
    ParseTest.from_s("[]"),
    ParseTest.from_s(%q|{ "foo" : "bar" }|),
    ParseTest.from_s(%q|["foo", "bar"]|),
    ParseTest.from_s(%q|{ "foo" : 42 }|),
    ParseTest.from_s("{ \"foo\"\n : 42 }"), # newline after key
    ParseTest.from_s("{ \"foo\" : \n 42 }"), # newline after colon
    ParseTest.from_s(%q|[10, 11]|),
    ParseTest.from_s(%q|[10,"foo"]|),
    ParseTest.from_s(%q|{ "foo" : "bar", "baz" : "boo" }|),
    ParseTest.from_s(%q|{ "foo" : { "bar" : "baz" }, "baz" : "boo" }|),
    ParseTest.from_s(%q|{ "foo" : { "bar" : "baz", "woo" : "w00t" }, "baz" : "boo" }|),
    ParseTest.from_s(%q|{ "foo" : [10,11,12], "baz" : "boo" }|),
    ParseTest.from_s(%q|[{},{},{},{}]|),
    ParseTest.from_s(%q|[[[[[[]]]]]]|),
    ParseTest.from_s(%q|[[1], [1,2], [1,2,3], []]|), # nested multiple-valued array
    ParseTest.from_s(%q|{"a":{"a":{"a":{"a":{"a":{"a":{"a":{"a":42}}}}}}}}|),
    ParseTest.from_s("[ \"#comment\" ]"), # quoted # comment
    ParseTest.from_s("[ \"//comment\" ]"), # quoted // comment
    # this long one is mostly to test rendering
    ParseTest.from_s(%q|{ "foo" : { "bar" : "baz", "woo" : "w00t" }, "baz" : { "bar" : "baz", "woo" : [1,2,3,4], "w00t" : true, "a" : false, "b" : 3.14, "c" : null } }|),
    ParseTest.from_s("{}"),
    ParseTest.from_pair(true, "[ 10e+3 ]") # "+" in a number (lift doesn't handle))
  ]

  ValidConfInvalidJson = [
    ParseTest.from_s(""), # empty document
    ParseTest.from_s(" "), # empty document single space
    ParseTest.from_s("\n"), # empty document single newline
    ParseTest.from_s(" \n \n \n\n\n"), # complicated empty document
    ParseTest.from_s("# foo"), # just a comment
    ParseTest.from_s("# bar\n"), # just a comment with a newline
    ParseTest.from_s("# foo\n//bar"), # comment then another with no newline
    ParseTest.from_s(%q|{ "foo" = 42 }|), # equals rather than colon
    ParseTest.from_s(%q|{ foo { "bar" : 42 } }|), # omit the colon for object value
    ParseTest.from_s(%q|{ foo baz { "bar" : 42 } }|), # omit the colon with unquoted key with spaces
    ParseTest.from_s(%q| "foo" : 42 |), # omit braces on root object
    ParseTest.from_s(%q|{ "foo" : bar }|), # no quotes on value
    ParseTest.from_s(%q|{ "foo" : null bar 42 baz true 3.14 "hi" }|), # bunch of values to concat into a string
    ParseTest.from_s("{ foo : \"bar\" }"), # no quotes on key
    ParseTest.from_s("{ foo : bar }"), # no quotes on key or value
    ParseTest.from_s("{ foo.bar : bar }"), # path expression in key
    ParseTest.from_s("{ foo.\"hello world\".baz : bar }"), # partly-quoted path expression in key
    ParseTest.from_s("{ foo.bar \n : bar }"), # newline after path expression in key
    ParseTest.from_s("{ foo bar : bar }"), # whitespace in the key
    ParseTest.from_s("{ true : bar }"), # key is a non-string token
    ParseTest.from_pair(true, %q|{ "foo" : "bar", "foo" : "bar2" }|), # dup keys - lift just returns both
    ParseTest.from_pair(true, "[ 1, 2, 3, ]"), # single trailing comma (lift fails to throw)
    ParseTest.from_pair(true, "[1,2,3 , ]"), # single trailing comma with whitespace
    ParseTest.from_pair(true, "[1,2,3\n\n , \n]"), # single trailing comma with newlines
    ParseTest.from_pair(true, "[1,]"), # single trailing comma with one-element array
    ParseTest.from_pair(true, "{ \"foo\" : 10, }"), # extra trailing comma (lift fails to throw)
    ParseTest.from_pair(true, "{ \"a\" : \"b\", }"), # single trailing comma in object
    ParseTest.from_s("{ a : b, }"), # single trailing comma in object (unquoted strings)
    ParseTest.from_s("{ a : b \n , \n }"), # single trailing comma in object with newlines
    ParseTest.from_s("a : b, c : d,"), # single trailing comma in object with no root braces
    ParseTest.from_s("{ a : b\nc : d }"), # skip comma if there's a newline
    ParseTest.from_s("a : b\nc : d"), # skip comma if there's a newline and no root braces
    ParseTest.from_s("a : b\nc : d,"), # skip one comma but still have one at the end
    ParseTest.from_s("[ foo ]"), # not a known token in JSON
    ParseTest.from_s("[ t ]"), # start of "true" but ends wrong in JSON
    ParseTest.from_s("[ tx ]"),
    ParseTest.from_s("[ tr ]"),
    ParseTest.from_s("[ trx ]"),
    ParseTest.from_s("[ tru ]"),
    ParseTest.from_s("[ trux ]"),
    ParseTest.from_s("[ truex ]"),
    ParseTest.from_s("[ 10x ]"), # number token with trailing junk
    ParseTest.from_s("[ / ]"), # unquoted string "slash"
    ParseTest.from_s("{ include \"foo\" }"), # valid include
    ParseTest.from_s("{ include\n\"foo\" }"), # include with just a newline separating from string
    ParseTest.from_s("{ include\"foo\" }"), # include with no whitespace after it
    ParseTest.from_s("[ include ]"), # include can be a string value in an array
    ParseTest.from_s("{ foo : include }"), # include can be a field value also
    ParseTest.from_s("{ include \"foo\", \"a\" : \"b\" }"), # valid include followed by comma and field
    ParseTest.from_s("{ foo include : 42 }"), # valid to have a key not starting with include
    ParseTest.from_s("[ ${foo} ]"),
    ParseTest.from_s("[ ${?foo} ]"),
    ParseTest.from_s("[ ${\"foo\"} ]"),
    ParseTest.from_s("[ ${foo.bar} ]"),
    ParseTest.from_s("[ abc xyz ${foo.bar} qrs tuv ]"), # value concatenation
    ParseTest.from_s("[ 1, 2, 3, blah ]"),
    ParseTest.from_s("[ ${\"foo.bar\"} ]"),
    ParseTest.from_s("{} # comment"),
    ParseTest.from_s("{} // comment"),
    ParseTest.from_s(%q|{ "foo" #comment
: 10 }|),
    ParseTest.from_s(%q|{ "foo" // comment
: 10 }|),
    ParseTest.from_s(%q|{ "foo" : #comment
10 }|),
    ParseTest.from_s(%q|{ "foo" : // comment
10 }|),
    ParseTest.from_s(%q|{ "foo" : 10 #comment
}|),
    ParseTest.from_s(%q|{ "foo" : 10 // comment
}|),
    ParseTest.from_s(%q|[ 10, # comment
11]|),
    ParseTest.from_s(%q|[ 10, // comment
11]|),
    ParseTest.from_s(%q|[ 10 # comment
, 11]|),
    ParseTest.from_s(%q|[ 10 // comment
, 11]|),
    ParseTest.from_s(%q|{ /a/b/c : 10 }|), # key has a slash in it
    ParseTest.new(false, true, "[${ foo.bar}]"), # substitution with leading spaces
    ParseTest.new(false, true, "[${foo.bar }]"), # substitution with trailing spaces
    ParseTest.new(false, true, "[${ \"foo.bar\"}]"), # substitution with leading spaces and quoted
    ParseTest.new(false, true, "[${\"foo.bar\" }]"), # substitution with trailing spaces and quoted
    ParseTest.from_s(%q|[ ${"foo""bar"} ]|), # multiple strings in substitution
    ParseTest.from_s(%q|[ ${foo "bar" baz} ]|), # multiple strings and whitespace in substitution
    ParseTest.from_s("[${true}]"), # substitution with unquoted true token
    ParseTest.from_s("a = [], a += b"), # += operator with previous init
    ParseTest.from_s("{ a = [], a += 10 }"), # += in braces object with previous init
    ParseTest.from_s("a += b"), # += operator without previous init
    ParseTest.from_s("{ a += 10 }"), # += in braces object without previous init
    ParseTest.from_s("[ 10e3e3 ]"), # two exponents. this should parse to a number plus string "e3"
    ParseTest.from_s("[ 1-e3 ]"), # malformed number should end up as a string instead
    ParseTest.from_s("[ 1.0.0 ]"), # two decimals, should end up as a string
    ParseTest.from_s("[ 1.0. ]")
  ]


  InvalidConf = InvalidJsonInvalidConf

  # .conf is a superset of JSON so validJson just goes in here
  ValidConf = ValidConfInvalidJson + ValidJson

  def self.add_offending_json_to_exception(parser_name, s, & block)
    begin
      block.call
    rescue => e
      tokens =
        begin
          "tokens: " + TestUtils.tokenize_as_list(s).join("\n")
        rescue => tokenize_ex
          "tokenizer failed: #{tokenize_ex}\n#{tokenize_ex.backtrace.join("\n")}"
        end
      raise ArgumentError, "#{parser_name} parser did wrong thing on '#{s}', #{tokens}; error: #{e}\n#{e.backtrace.join("\n")}"
    end
  end

  def self.whitespace_variations(tests, valid_in_lift)
    variations = [
      Proc.new { |s| s }, # identity
      Proc.new { |s| " " + s },
      Proc.new { |s| s + " " },
      Proc.new { |s| " " + s + " " },
      Proc.new { |s| s.gsub(" ", "") }, # this would break with whitespace in a key or value
      Proc.new { |s| s.gsub(":", " : ") }, # could break with : in a key or value
      Proc.new { |s| s.gsub(",", " , ") }, # could break with , in a key or value
    ]
    tests.map { |t|
      if t.whitespace_matters?
        t
      else
        with_no_ascii =
          if t.test.include?(" ")
            [ParseTest.from_pair(valid_in_lift,
                                 t.test.gsub(" ", "\u2003"))] # 2003 = em space, to test non-ascii whitespace
          else
            []
          end

        with_no_ascii << variations.reduce([]) { |acc, v|
          acc << ParseTest.from_pair(t.lift_behavior_unexpected?, v.call(t.test))
          acc
        }
      end
    }.flatten
  end


  ##################
  # Tokenizer Functions
  ##################
  def self.wrap_tokens(token_list)
    # Wraps token_list in START and EOF tokens
    [Tokens::START] + token_list + [Tokens::EOF]
  end

  def self.tokenize(config_origin, input)
    Hocon::Impl::Tokenizer.tokenize(config_origin, input, Hocon::ConfigSyntax::CONF)
  end

  def self.tokenize_from_s(s)
    tokenize(Hocon::Impl::SimpleConfigOrigin.new_simple("anonymous Reader"),
             StringIO.new(s))
  end

  def self.tokenize_as_list(input_string)
    token_iterator = tokenize_from_s(input_string)

    token_iterator.to_list
  end

  def self.tokenize_as_string(input_string)
    Hocon::Impl::Tokenizer.render(tokenize_from_s(input_string))
  end

  def self.config_node_simple_value(value)
    Hocon::Impl::ConfigNodeSimpleValue.new(value)
  end

  def self.config_node_key(path)
    Hocon::Impl::PathParser.parse_path_node(path)
  end

  def self.config_node_single_token(value)
    Hocon::Impl::ConfigNodeSingleToken.new(value)
  end

  def self.config_node_object(nodes)
    Hocon::Impl::ConfigNodeObject.new(nodes)
  end

  def self.config_node_array(nodes)
    Hocon::Impl::ConfigNodeArray.new(nodes)
  end

  def self.config_node_concatenation(nodes)
    Hocon::Impl::ConfigNodeConcatenation.new(nodes)
  end

  def self.node_colon
    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::COLON)
  end

  def self.node_space
    Hocon::Impl::ConfigNodeSingleToken.new(token_unquoted(" "))
  end

  def self.node_open_brace
    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::OPEN_CURLY)
  end

  def self.node_close_brace
    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::CLOSE_CURLY)
  end

  def self.node_open_bracket
    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::OPEN_SQUARE)
  end

  def self.node_close_bracket
    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::CLOSE_SQUARE)
  end

  def self.node_comma
    Hocon::Impl::ConfigNodeSingleToken.new(Tokens::COMMA)
  end

  def self.node_line(line)
    Hocon::Impl::ConfigNodeSingleToken.new(token_line(line))
  end

  def self.node_whitespace(whitespace)
    Hocon::Impl::ConfigNodeSingleToken.new(token_whitespace(whitespace))
  end

  def self.node_key_value_pair(key, value)
    nodes = [key, node_space, node_colon, node_space, value]
    Hocon::Impl::ConfigNodeField.new(nodes)
  end

  def self.node_int(value)
    Hocon::Impl::ConfigNodeSimpleValue.new(token_int(value))
  end

  def self.node_string(value)
    Hocon::Impl::ConfigNodeSimpleValue.new(token_string(value))
  end

  def self.node_double(value)
    Hocon::Impl::ConfigNodeSimpleValue.new(token_double(value))
  end

  def self.node_true
    Hocon::Impl::ConfigNodeSimpleValue.new(token_true)
  end

  def self.node_false
    Hocon::Impl::ConfigNodeSimpleValue.new(token_false)
  end

  def self.node_comment_hash(text)
    Hocon::Impl::ConfigNodeComment.new(token_comment_hash(text))
  end

  def self.node_comment_double_slash(text)
    Hocon::Impl::ConfigNodeComment.new(token_comment_double_slash(text))
  end

  def self.node_unquoted_text(text)
    Hocon::Impl::ConfigNodeSimpleValue.new(token_unquoted(text))
  end

  def self.node_null
    Hocon::Impl::ConfigNodeSimpleValue.new(token_null)
  end

  def self.node_key_substitution(s)
    Hocon::Impl::ConfigNodeSimpleValue.new(token_key_substitution(s))
  end

  def self.node_optional_substitution(*expression)
    Hocon::Impl::ConfigNodeSimpleValue.new(token_optional_substitution(*expression))
  end

  def self.node_substitution(*expression)
    Hocon::Impl::ConfigNodeSimpleValue.new(token_substitution(*expression))
  end

  def self.fake_origin
    Hocon::Impl::SimpleConfigOrigin.new_simple("fake origin")
  end

  def self.token_line(line_number)
    Tokens.new_line(fake_origin.with_line_number(line_number))
  end

  def self.token_true
    Tokens.new_boolean(fake_origin, true)
  end

  def self.token_false
    Tokens.new_boolean(fake_origin, false)
  end

  def self.token_null
    Tokens.new_null(fake_origin)
  end

  def self.token_unquoted(value)
    Tokens.new_unquoted_text(fake_origin, value)
  end

  def self.token_comment_double_slash(value)
    Tokens.new_comment_double_slash(fake_origin, value)
  end

  def self.token_comment_hash(value)
    Tokens.new_comment_hash(fake_origin, value)
  end

  def self.token_whitespace(value)
    Tokens.new_ignored_whitespace(fake_origin, value)
  end

  def self.token_string(value)
    Tokens.new_string(fake_origin, value, "\"#{value}\"")
  end

  def self.token_double(value)
    Tokens.new_double(fake_origin, value, "#{value}")
  end

  def self.token_int(value)
    Tokens.new_int(fake_origin, value, "#{value}")
  end

  def self.token_maybe_optional_substitution(optional, token_list)
    Tokens.new_substitution(fake_origin, optional, token_list)
  end

  def self.token_substitution(*token_list)
    token_maybe_optional_substitution(false, token_list)
  end

  def self.token_optional_substitution(*token_list)
    token_maybe_optional_substitution(true, token_list)
  end

  def self.token_key_substitution(value)
    token_substitution(token_string(value))
  end

  def self.parse_object(s)
    parse_config(s).root
  end

  def self.parse_config(s)
    options = Hocon::ConfigParseOptions.defaults.
        set_origin_description("test string").
        set_syntax(Hocon::ConfigSyntax::CONF)
    Hocon::ConfigFactory.parse_string(s, options)
  end

  ##################
  # ConfigValue helpers
  ##################
  def self.int_value(value)
    ConfigInt.new(fake_origin, value, nil)
  end

  def self.double_value(value)
    ConfigDouble.new(fake_origin, value, nil)
  end

  def self.string_value(value)
    ConfigString::Quoted.new(fake_origin, value)
  end

  def self.null_value
    ConfigNull.new(fake_origin)
  end

  def self.bool_value(value)
    ConfigBoolean.new(fake_origin, value)
  end

  def self.config_map(input_map)
    # Turns {String: Int} maps into {String: ConfigInt} maps
    Hash[ input_map.map { |k, v| [k, int_value(v)] } ]
  end

  def self.subst(ref, optional = false)
    path = Path.new_path(ref)
    ConfigReference.new(fake_origin, SubstitutionExpression.new(path, optional))
  end

  def self.subst_in_string(ref, optional = false)
    pieces = [string_value("start<"), subst(ref, optional), string_value(">end")]
    ConfigConcatenation.new(fake_origin, pieces)
  end

  ##################
  # Token Functions
  ##################
  class NotEqualToAnythingElse
    def ==(other)
      other.is_a? NotEqualToAnythingElse
    end

    def hash
      971
    end
  end

  ##################
  # Path Functions
  ##################
  def self.path(*elements)
    # this is importantly NOT using Path.newPath, which relies on
    # the parser; in the test suite we are often testing the parser,
    # so we don't want to use the parser to build the expected result.
    Path.from_string_list(elements)
  end

  RESOURCE_DIR = "spec/fixtures/test_utils/resources"

  def self.resource_file(filename)
    File.join(RESOURCE_DIR, filename)
  end

  def self.json_quoted_resource_file(filename)
    quote_json_string(resource_file(filename).to_s)
  end

  def self.quote_json_string(s)
    Hocon::Impl::ConfigImplUtil.render_json_string(s)
  end

  ##################
  # RSpec Tests
  ##################
  def self.check_equal_objects(first_object, second_object)
    it "should find the two objects to be equal" do
      not_equal_to_anything_else = TestUtils::NotEqualToAnythingElse.new

      # Equality
      expect(first_object).to eq(second_object)
      expect(second_object).to eq(first_object)

      # Hashes
      expect(first_object.hash).to eq(second_object.hash)

      # Other random object
      expect(first_object).not_to eq(not_equal_to_anything_else)
      expect(not_equal_to_anything_else).not_to eq(first_object)

      expect(second_object).not_to eq(not_equal_to_anything_else)
      expect(not_equal_to_anything_else).not_to eq(second_object)
    end
  end

  def self.check_not_equal_objects(first_object, second_object)

    it "should find the two objects to be not equal" do
      not_equal_to_anything_else = TestUtils::NotEqualToAnythingElse.new

      # Equality
      expect(first_object).not_to eq(second_object)
      expect(second_object).not_to eq(first_object)

      # Hashes
      # hashcode inequality isn't guaranteed, but
      # as long as it happens to work it might
      # detect a bug (if hashcodes are equal,
      # check if it's due to a bug or correct
      # before you remove this)
      expect(first_object.hash).not_to eq(second_object.hash)

      # Other random object
      expect(first_object).not_to eq(not_equal_to_anything_else)
      expect(not_equal_to_anything_else).not_to eq(first_object)

      expect(second_object).not_to eq(not_equal_to_anything_else)
      expect(not_equal_to_anything_else).not_to eq(second_object)
    end
  end
end


##################
# RSpec Shared Examples
##################

# Examples for comparing an object that won't equal anything but itself
# Used in the object_equality examples below
shared_examples_for "not_equal_to_other_random_thing" do
  let(:not_equal_to_anything_else) { TestUtils::NotEqualToAnythingElse.new }

  it "should find the first object not equal to a random other thing" do
    expect(first_object).not_to eq(not_equal_to_anything_else)
    expect(not_equal_to_anything_else).not_to eq(first_object)
  end

  it "should find the second object not equal to a random other thing" do
    expect(second_object).not_to eq(not_equal_to_anything_else)
    expect(not_equal_to_anything_else).not_to eq(second_object)
  end
end

# Examples for making sure two objects are equal
shared_examples_for "object_equality" do

  it "should find the first object to be equal to the second object" do
    expect(first_object).to eq(second_object)
  end

  it "should find the second object to be equal to the first object" do
    expect(second_object).to eq(first_object)
  end

  it "should find the hash codes of the two objects to be equal" do
    expect(first_object.hash).to eq(second_object.hash)
  end

  include_examples "not_equal_to_other_random_thing"
end

# Examples for making sure two objects are not equal
shared_examples_for "object_inequality" do

  it "should find the first object to not be equal to the second object" do
    expect(first_object).not_to eq(second_object)
  end

  it "should find the second object to not be equal to the first object" do
    expect(second_object).not_to eq(first_object)
  end

  it "should find the hash codes of the two objects to not be equal" do
    # hashcode inequality isn't guaranteed, but
    # as long as it happens to work it might
    # detect a bug (if hashcodes are equal,
    # check if it's due to a bug or correct
    # before you remove this)
    expect(first_object.hash).not_to eq(second_object.hash)
  end

  include_examples "not_equal_to_other_random_thing"
end


shared_examples_for "path_render_test" do
  it "should find the expected rendered text equal to the rendered path" do
    expect(path.render).to eq(expected)
  end

  it "should find the path equal to the parsed expected text" do
    expect(Hocon::Impl::PathParser.parse_path(expected)).to eq(path)
  end

  it "should find the path equal to the parsed text that came from the rendered path" do
    expect(Hocon::Impl::PathParser.parse_path(path.render)).to eq(path)
  end
end
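(Not part of the diff.) A short sketch of how the specs listed above are expected to use these helpers; the error class name is an assumption, everything else is defined in this file:

    require_relative 'test_utils'

    conf    = TestUtils.parse_config("a : b\nc : d")            # parse with the suite's CONF options
    tokens  = TestUtils.tokenize_as_list("foo : 42")            # raw token list for the tokenizer specs
    wrapped = TestUtils.wrap_tokens([TestUtils.token_int(42)])  # expectation wrapped in START/EOF

    # intercept returns the raised exception, or fails loudly if nothing was raised.
    err = TestUtils.intercept(Hocon::ConfigError::ConfigParseError) do
      TestUtils.parse_config("{")
    end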