hocon 0.9.5 → 1.0.1
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +14 -2
- data/README.md +22 -10
- data/lib/hocon.rb +9 -3
- data/lib/hocon/config_factory.rb +4 -0
- data/lib/hocon/config_value_factory.rb +13 -2
- data/lib/hocon/impl/config_reference.rb +5 -2
- data/lib/hocon/impl/simple_config_origin.rb +1 -1
- data/spec/fixtures/parse_render/example1/input.conf +21 -0
- data/spec/fixtures/parse_render/example1/output.conf +26 -0
- data/spec/fixtures/parse_render/example1/output_nocomments.conf +17 -0
- data/spec/fixtures/parse_render/example2/input.conf +10 -0
- data/spec/fixtures/parse_render/example2/output.conf +17 -0
- data/spec/fixtures/parse_render/example2/output_nocomments.conf +17 -0
- data/spec/fixtures/parse_render/example3/input.conf +2 -0
- data/spec/fixtures/parse_render/example3/output.conf +2 -0
- data/spec/fixtures/parse_render/example4/input.json +6 -0
- data/spec/fixtures/parse_render/example4/output.conf +6 -0
- data/spec/fixtures/test_utils/resources/bom.conf +2 -0
- data/spec/fixtures/test_utils/resources/cycle.conf +1 -0
- data/spec/fixtures/test_utils/resources/file-include.conf +5 -0
- data/spec/fixtures/test_utils/resources/include-from-list.conf +4 -0
- data/spec/fixtures/test_utils/resources/subdir/bar.conf +1 -0
- data/spec/fixtures/test_utils/resources/subdir/baz.conf +1 -0
- data/spec/fixtures/test_utils/resources/subdir/foo.conf +5 -0
- data/spec/fixtures/test_utils/resources/test01.conf +80 -0
- data/spec/fixtures/test_utils/resources/test01.json +4 -0
- data/spec/fixtures/test_utils/resources/test03.conf +36 -0
- data/spec/spec_helper.rb +43 -0
- data/spec/test_utils.rb +757 -0
- data/spec/unit/typesafe/config/concatenation_spec.rb +417 -0
- data/spec/unit/typesafe/config/conf_parser_spec.rb +822 -0
- data/spec/unit/typesafe/config/config_document_parser_spec.rb +494 -0
- data/spec/unit/typesafe/config/config_document_spec.rb +576 -0
- data/spec/unit/typesafe/config/config_factory_spec.rb +120 -0
- data/spec/unit/typesafe/config/config_node_spec.rb +552 -0
- data/spec/unit/typesafe/config/config_value_factory_spec.rb +85 -0
- data/spec/unit/typesafe/config/config_value_spec.rb +935 -0
- data/spec/unit/typesafe/config/hocon_spec.rb +54 -0
- data/spec/unit/typesafe/config/path_spec.rb +261 -0
- data/spec/unit/typesafe/config/public_api_spec.rb +520 -0
- data/spec/unit/typesafe/config/simple_config_spec.rb +112 -0
- data/spec/unit/typesafe/config/token_spec.rb +188 -0
- data/spec/unit/typesafe/config/tokenizer_spec.rb +801 -0
- metadata +39 -3
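Most of the new material is the spec suite that now ships inside the gem. For orientation, here is a minimal sketch of the public API those specs exercise; the method names are taken from the specs below, while `parse_string` and the HOCON snippet itself are assumptions for illustration:

```ruby
require 'hocon/config_factory'
require 'hocon/config_value_factory'

# Hypothetical inline config; the specs below parse fixture files via parse_file instead.
conf = Hocon::ConfigFactory.parse_string('foo { bar { yahoo = yippee } }')

conf.has_path?("foo.bar.yahoo")                      # => true
conf.get_value("foo.bar.yahoo").transform_to_string  # => "yippee"

# Configs are immutable: with_value/without_path return modified copies.
map = Hocon::ConfigValueFactory.from_any_ref({"a" => "b", "c" => {"d" => "e"}}, nil)
modified = conf.with_value("foo.baz", map)
modified.without_path("foo.bar").has_path?("foo.bar")  # => false
```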
data/spec/unit/typesafe/config/simple_config_spec.rb
@@ -0,0 +1,112 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon/config_factory'
+require 'hocon/config_render_options'
+require 'hocon/config_value_factory'
+
+describe Hocon::Impl::SimpleConfig do
+  let(:render_options) { Hocon::ConfigRenderOptions.defaults }
+
+  before do
+    render_options.origin_comments = false
+    render_options.json = false
+  end
+
+  shared_examples_for "config_value_retrieval_single_value" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should allow you to get a value for a specific configuration setting" do
+      expect(conf.get_value(setting).transform_to_string).to eq(expected_setting)
+    end
+  end
+
+  shared_examples_for "config_value_retrieval_config_list" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should allow you to get a value for a setting whose value is a data structure" do
+      expect(conf.get_value(setting).
+               render_value_to_sb(StringIO.new, 2, nil,
+                                  Hocon::ConfigRenderOptions.new(false, false, false, false)).
+               string).to eq(expected_setting)
+    end
+  end
+
+  shared_examples_for "has_path_check" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should return true if a path exists" do
+      expect(conf.has_path?(setting)).to eql(true)
+    end
+
+    it "should return false if a path does not exist" do
+      expect(conf.has_path?(false_setting)).to eq(false)
+    end
+  end
+
+  shared_examples_for "add_value_to_config" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should add desired setting with desired value" do
+      modified_conf = conf.with_value(setting_to_add, value_to_add)
+      expect(modified_conf.get_value(setting_to_add)).to eq(value_to_add)
+    end
+  end
+
+  shared_examples_for "add_data_structures_to_config" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should add a nested map to a config" do
+      map = Hocon::ConfigValueFactory.from_any_ref({"a" => "b", "c" => {"d" => "e"}}, nil)
+      modified_conf = conf.with_value(setting_to_add, map)
+      expect(modified_conf.get_value(setting_to_add)).to eq(map)
+    end
+
+    it "should add an array to a config" do
+      array = Hocon::ConfigValueFactory.from_any_ref([1,2,3,4,5], nil)
+      modified_conf = conf.with_value(setting_to_add, array)
+      expect(modified_conf.get_value(setting_to_add)).to eq(array)
+    end
+  end
+
+  shared_examples_for "remove_value_from_config" do
+    let(:input_file) { "#{FIXTURE_DIR}/parse_render/#{example[:name]}/input.conf" }
+    it "should remove desired setting" do
+      modified_conf = conf.without_path(setting_to_remove)
+      expect(modified_conf.has_path?(setting_to_remove)).to be false
+    end
+  end
+
+  context "example1" do
+    let(:example) { EXAMPLE1 }
+    let(:setting) { "foo.bar.yahoo" }
+    let(:expected_setting) { "yippee" }
+    let(:false_setting) { "non-existent" }
+    let(:setting_to_add) { "foo.bar.test" }
+    let(:value_to_add) { Hocon::Impl::ConfigString.new(nil, "This is a test string") }
+    let(:setting_to_remove) { "foo.bar" }
+
+    context "parsing a .conf file" do
+      let(:conf) { Hocon::ConfigFactory.parse_file(input_file) }
+      include_examples "config_value_retrieval_single_value"
+      include_examples "has_path_check"
+      include_examples "add_value_to_config"
+      include_examples "add_data_structures_to_config"
+      include_examples "remove_value_from_config"
+    end
+  end
+
+  context "example2" do
+    let(:example) { EXAMPLE2 }
+    let(:setting) { "jruby-puppet.jruby-pools" }
+    let(:expected_setting) { "[{environment=production}]" }
+    let(:false_setting) { "jruby-puppet-false" }
+    let(:setting_to_add) { "top" }
+    let(:value_to_add) { Hocon::Impl::ConfigInt.new(nil, 12345, "12345") }
+    let(:setting_to_remove) { "jruby-puppet.master-conf-dir" }
+
+    context "parsing a .conf file" do
+      let(:conf) { Hocon::ConfigFactory.parse_file(input_file) }
+      include_examples "config_value_retrieval_config_list"
+      include_examples "has_path_check"
+      include_examples "add_value_to_config"
+      include_examples "add_data_structures_to_config"
+      include_examples "remove_value_from_config"
+    end
+  end
+end
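The `config_value_retrieval_config_list` example above renders a value through `render_value_to_sb` with every rendering option switched off, which is what produces the compact single-line form (`"[{environment=production}]"`) asserted in example2. A minimal sketch of the same call outside RSpec, assuming the four positional flags follow the upstream typesafe ordering (origin_comments, comments, formatted, json) and substituting a hypothetical inline config for the example2 fixture:

```ruby
require 'stringio'
require 'hocon/config_factory'
require 'hocon/config_render_options'

conf = Hocon::ConfigFactory.parse_string(
  'jruby-puppet { jruby-pools = [{environment = production}] }')

# All four flags off => no comments, no formatting, HOCON (not JSON) output.
sb = StringIO.new
conf.get_value("jruby-puppet.jruby-pools").
  render_value_to_sb(sb, 2, nil, Hocon::ConfigRenderOptions.new(false, false, false, false))
sb.string  # => "[{environment=production}]"
```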
data/spec/unit/typesafe/config/token_spec.rb
@@ -0,0 +1,188 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'test_utils'
+require 'pp'
+
+
+describe Hocon::Impl::Token do
+  Tokens = Hocon::Impl::Tokens
+
+  ####################
+  # Equality
+  ####################
+  context "check token equality" do
+    context "syntax tokens" do
+      let(:first_object) { Tokens::START }
+      let(:second_object) { Tokens::START }
+
+      include_examples "object_equality"
+    end
+
+    context "integer tokens" do
+      let(:first_object) { TestUtils.token_int(42) }
+      let(:second_object) { TestUtils.token_int(42) }
+
+      include_examples "object_equality"
+    end
+
+    context "truth tokens" do
+      let(:first_object) { TestUtils.token_true }
+      let(:second_object) { TestUtils.token_true }
+
+      include_examples "object_equality"
+    end
+
+    context "int and double of the same value" do
+      let(:first_object) { TestUtils.token_int(10) }
+      let(:second_object) { TestUtils.token_double(10.0) }
+
+      include_examples "object_equality"
+    end
+
+    context "double tokens" do
+      let(:first_object) { TestUtils.token_double(3.14) }
+      let(:second_object) { TestUtils.token_double(3.14) }
+
+      include_examples "object_equality"
+    end
+
+    context "quoted string tokens" do
+      let(:first_object) { TestUtils.token_string("foo") }
+      let(:second_object) { TestUtils.token_string("foo") }
+
+      include_examples "object_equality"
+    end
+
+    context "unquoted string tokens" do
+      let(:first_object) { TestUtils.token_unquoted("foo") }
+      let(:second_object) { TestUtils.token_unquoted("foo") }
+
+      include_examples "object_equality"
+    end
+
+    context "key substitution tokens" do
+      let(:first_object) { TestUtils.token_key_substitution("foo") }
+      let(:second_object) { TestUtils.token_key_substitution("foo") }
+
+      include_examples "object_equality"
+    end
+
+    context "null tokens" do
+      let(:first_object) { TestUtils.token_null }
+      let(:second_object) { TestUtils.token_null }
+
+      include_examples "object_equality"
+    end
+
+    context "newline tokens" do
+      let(:first_object) { TestUtils.token_line(10) }
+      let(:second_object) { TestUtils.token_line(10) }
+
+      include_examples "object_equality"
+    end
+  end
+
+
+  ####################
+  # Inequality
+  ####################
+  context "check token inequality" do
+    context "syntax tokens" do
+      let(:first_object) { Tokens::START }
+      let(:second_object) { Tokens::OPEN_CURLY }
+
+      include_examples "object_inequality"
+    end
+
+    context "integer tokens" do
+      let(:first_object) { TestUtils.token_int(42) }
+      let(:second_object) { TestUtils.token_int(43) }
+
+      include_examples "object_inequality"
+    end
+
+    context "double tokens" do
+      let(:first_object) { TestUtils.token_double(3.14) }
+      let(:second_object) { TestUtils.token_double(4.14) }
+
+      include_examples "object_inequality"
+    end
+
+    context "truth tokens" do
+      let(:first_object) { TestUtils.token_true }
+      let(:second_object) { TestUtils.token_false }
+
+      include_examples "object_inequality"
+    end
+
+    context "quoted string tokens" do
+      let(:first_object) { TestUtils.token_string("foo") }
+      let(:second_object) { TestUtils.token_string("bar") }
+
+      include_examples "object_inequality"
+    end
+
+    context "unquoted string tokens" do
+      let(:first_object) { TestUtils.token_unquoted("foo") }
+      let(:second_object) { TestUtils.token_unquoted("bar") }
+
+      include_examples "object_inequality"
+    end
+
+    context "key substitution tokens" do
+      let(:first_object) { TestUtils.token_key_substitution("foo") }
+      let(:second_object) { TestUtils.token_key_substitution("bar") }
+
+      include_examples "object_inequality"
+    end
+
+    context "newline tokens" do
+      let(:first_object) { TestUtils.token_line(10) }
+      let(:second_object) { TestUtils.token_line(11) }
+
+      include_examples "object_inequality"
+    end
+
+    context "true and int tokens" do
+      let(:first_object) { TestUtils.token_true }
+      let(:second_object) { TestUtils.token_int(1) }
+
+      include_examples "object_inequality"
+    end
+
+    context "string 'true' and true tokens" do
+      let(:first_object) { TestUtils.token_true }
+      let(:second_object) { TestUtils.token_string("true") }
+
+      include_examples "object_inequality"
+    end
+
+    context "int and double of slightly different values" do
+      let(:first_object) { TestUtils.token_int(10) }
+      let(:second_object) { TestUtils.token_double(10.000001) }
+
+      include_examples "object_inequality"
+    end
+  end
+
+  context "Check that to_s doesn't throw exception" do
+    it "shouldn't throw an exception" do
+      # just be sure to_s doesn't throw an exception. It's for debugging
+      # so its exact output doesn't matter a lot
+      TestUtils.token_true.to_s
+      TestUtils.token_false.to_s
+      TestUtils.token_int(42).to_s
+      TestUtils.token_double(3.14).to_s
+      TestUtils.token_null.to_s
+      TestUtils.token_unquoted("foo").to_s
+      TestUtils.token_string("bar").to_s
+      TestUtils.token_key_substitution("a").to_s
+      TestUtils.token_line(10).to_s
+      Tokens::START.to_s
+      Tokens::EOF.to_s
+      Tokens::COLON.to_s
+    end
+  end
+end
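Every context above delegates to the `object_equality`/`object_inequality` shared examples, which live in the new `data/spec/spec_helper.rb` (+43 lines) and are not shown in this diff. A plausible minimal shape, for orientation only (the real definitions may differ):

```ruby
# Hypothetical reconstruction of the shared examples the token specs include;
# the actual definitions are in data/spec/spec_helper.rb.
shared_examples_for "object_equality" do
  it "should find the objects equal in both directions" do
    expect(first_object).to eq(second_object)
    expect(second_object).to eq(first_object)
  end

  it "should find the hash codes of the two objects equal" do
    expect(first_object.hash).to eq(second_object.hash)
  end
end

shared_examples_for "object_inequality" do
  it "should find the objects unequal in both directions" do
    expect(first_object).not_to eq(second_object)
    expect(second_object).not_to eq(first_object)
  end
end
```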
data/spec/unit/typesafe/config/tokenizer_spec.rb
@@ -0,0 +1,801 @@
+# encoding: utf-8
+
+require 'spec_helper'
+require 'hocon'
+require 'test_utils'
+require 'pp'
+
+
+describe Hocon::Impl::Tokenizer do
+  Tokens = Hocon::Impl::Tokens
+
+  shared_examples_for "token_matching" do
+    it "should match the tokenized string to the list of expected tokens" do
+      tokenized_from_string = TestUtils.tokenize_as_list(test_string)
+      tokenized_as_string = TestUtils.tokenize_as_string(test_string)
+
+      # Add START and EOF tokens
+      wrapped_tokens = TestUtils.wrap_tokens(expected_tokens)
+
+      # Compare the two lists of tokens
+      expect(tokenized_from_string).to eq(wrapped_tokens)
+      expect(tokenized_as_string).to eq(test_string)
+    end
+  end
+
+  shared_examples_for "strings_with_problems" do
+    it "should find a problem when tokenizing" do
+      token_list = TestUtils.tokenize_as_list(test_string)
+      expect(token_list.map { |token| Tokens.problem?(token) }).to include(true)
+    end
+  end
+
+  ####################
+  # Whitespace
+  ####################
+  context "tokenizing whitespace" do
+    context "tokenize empty string" do
+      let(:test_string) { "" }
+      let(:expected_tokens) { [] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize newlines" do
+      let(:test_string) { "\n\n" }
+      let(:expected_tokens) { [TestUtils.token_line(1),
+                               TestUtils.token_line(2)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unquoted text should keep spaces" do
+      let(:test_string) { " foo \n" }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_unquoted("foo"),
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_line(1)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unquoted text with internal spaces should keep spaces" do
+      let(:test_string) { " foo bar baz \n" }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_unquoted("foo"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_unquoted("bar"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_unquoted("baz"),
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_line(1)] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Booleans and Null
+  ####################
+  context "tokenizing booleans and null" do
+    context "tokenize true and unquoted text" do
+      let(:test_string) { "truefoo" }
+      let(:expected_tokens) { [TestUtils.token_true,
+                               TestUtils.token_unquoted("foo")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize false and unquoted text" do
+      let(:test_string) { "falsefoo" }
+      let(:expected_tokens) { [TestUtils.token_false,
+                               TestUtils.token_unquoted("foo")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize null and unquoted text" do
+      let(:test_string) { "nullfoo" }
+      let(:expected_tokens) { [TestUtils.token_null,
+                               TestUtils.token_unquoted("foo")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unquoted text containing true" do
+      let(:test_string) { "footrue" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("footrue")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unquoted text containing space and true" do
+      let(:test_string) { "foo true" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("foo"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_true] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize true and space and unquoted text" do
+      let(:test_string) { "true foo" }
+      let(:expected_tokens) { [TestUtils.token_true,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_unquoted("foo")] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Slashes
+  ####################
+  context "tokenizing slashes" do
+    context "tokenize unquoted text containing slash" do
+      let(:test_string) { "a/b/c/" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("a/b/c/")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash" do
+      let(:test_string) { "/" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("/")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash space slash" do
+      let(:test_string) { "/ /" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("/"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_unquoted("/")] }
+
+      include_examples "token_matching"
+    end
+
+    ####################
+    # Quotes
+    ####################
+    context "tokenize mixed unquoted and quoted" do
+      let(:test_string) { " foo\"bar\"baz \n" }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_unquoted("foo"),
+                               TestUtils.token_string("bar"),
+                               TestUtils.token_unquoted("baz"),
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_line(1)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize empty triple quoted string" do
+      let(:test_string) { '""""""' }
+      let(:expected_tokens) { [TestUtils.token_string("")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize trivial triple quoted string" do
+      let(:test_string) { '"""bar"""' }
+      let(:expected_tokens) { [TestUtils.token_string("bar")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize no escapes in triple quoted string" do
+      let(:test_string) { '"""\n"""' }
+      let(:expected_tokens) { [TestUtils.token_string('\n')] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize trailing quotes in triple quoted string" do
+      let(:test_string) { '"""""""""' }
+      let(:expected_tokens) { [TestUtils.token_string('"""')] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize new line in triple quoted string" do
+      let(:test_string) { '"""foo\nbar"""' }
+      let(:expected_tokens) { [TestUtils.token_string('foo\nbar')] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Find problems when tokenizing
+  ####################
+  context "finding problems when tokenizing" do
+    context "nothing after backslash" do
+      let(:test_string) { ' "\" ' }
+      include_examples "strings_with_problems"
+    end
+
+    context "there is no \q escape sequence" do
+      let(:test_string) { ' "\q" ' }
+      include_examples "strings_with_problems"
+    end
+
+    context "unicode byte sequence missing a byte" do
+      let(:test_string) { '"\u123"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "unicode byte sequence missing two bytes" do
+      let(:test_string) { '"\u12"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "unicode byte sequence missing three bytes" do
+      let(:test_string) { '"\u1"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "unicode byte missing" do
+      let(:test_string) { '"\u"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "just a single quote" do
+      let(:test_string) { '"' }
+      include_examples "strings_with_problems"
+    end
+
+    context "no end quote" do
+      let(:test_string) { ' "abcdefg' }
+      include_examples "strings_with_problems"
+    end
+
+    context "file ends with a backslash" do
+      let(:test_string) { '\"\\' }
+      include_examples "strings_with_problems"
+    end
+
+    context "file ends with a $" do
+      let(:test_string) { "$" }
+      include_examples "strings_with_problems"
+    end
+
+    context "file ends with a ${" do
+      let(:test_string) { "${" }
+      include_examples "strings_with_problems"
+    end
+  end
+
+  ####################
+  # Numbers
+  ####################
+  context "tokenizing numbers" do
+    context "parse positive float" do
+      let(:test_string) { "1.2" }
+      let(:expected_tokens) { [TestUtils.token_double(1.2)] }
+      include_examples "token_matching"
+    end
+
+    context "parse negative float" do
+      let(:test_string) { "-1.2" }
+      let(:expected_tokens) { [TestUtils.token_double(-1.2)] }
+      include_examples "token_matching"
+    end
+
+    context "parse exponent notation" do
+      let(:test_string) { "1e6" }
+      let(:expected_tokens) { [TestUtils.token_double(1e6)] }
+      include_examples "token_matching"
+    end
+
+    context "parse negative exponent" do
+      let(:test_string) { "1e-6" }
+      let(:expected_tokens) { [TestUtils.token_double(1e-6)] }
+      include_examples "token_matching"
+    end
+
+    context "parse exponent with capital E" do
+      let(:test_string) { "1E-6" }
+      let(:expected_tokens) { [TestUtils.token_double(1e-6)] }
+      include_examples "token_matching"
+    end
+
+    context "parse negative int" do
+      let(:test_string) { "-1" }
+      let(:expected_tokens) { [TestUtils.token_int(-1)] }
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Comments
+  ####################
+  context "tokenizing comments" do
+    context "tokenize two slashes as comment" do
+      let(:test_string) { "//" }
+      let(:expected_tokens) { [TestUtils.token_comment_double_slash("")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize two slashes in string as string" do
+      let(:test_string) { '"//bar"' }
+      let(:expected_tokens) { [TestUtils.token_string("//bar")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash in string as string" do
+      let(:test_string) { '"#bar"' }
+      let(:expected_tokens) { [TestUtils.token_string("#bar")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comment after unquoted text" do
+      let(:test_string) { "bar//comment" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("bar"),
+                               TestUtils.token_comment_double_slash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment after unquoted text" do
+      let(:test_string) { "bar#comment" }
+      let(:expected_tokens) { [TestUtils.token_unquoted("bar"),
+                               TestUtils.token_comment_hash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comment after int" do
+      let(:test_string) { "10//comment" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_double_slash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment after int" do
+      let(:test_string) { "10#comment" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_hash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment after int" do
+      let(:test_string) { "10#comment" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_hash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comment after float" do
+      let(:test_string) { "3.14//comment" }
+      let(:expected_tokens) { [TestUtils.token_double(3.14),
+                               TestUtils.token_comment_double_slash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment after float" do
+      let(:test_string) { "3.14#comment" }
+      let(:expected_tokens) { [TestUtils.token_double(3.14),
+                               TestUtils.token_comment_hash("comment")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comment with newline" do
+      let(:test_string) { "10//comment\n12" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_double_slash("comment"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_int(12)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comment with newline" do
+      let(:test_string) { "10#comment\n12" }
+      let(:expected_tokens) { [TestUtils.token_int(10),
+                               TestUtils.token_comment_hash("comment"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_int(12)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comments on two consecutive lines" do
+      let(:test_string) { "//comment\n//comment2" }
+      let(:expected_tokens) { [TestUtils.token_comment_double_slash("comment"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_comment_double_slash("comment2")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comments on two consecutive lines" do
+      let(:test_string) { "#comment\n#comment2" }
+      let(:expected_tokens) { [TestUtils.token_comment_hash("comment"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_comment_hash("comment2")] }
+      include_examples "token_matching"
+    end
+
+    context "tokenize slash comments on multiple lines with whitespace" do
+      let(:test_string) { " //comment\r\n //comment2 \n//comment3 \n\n//comment4" }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_comment_double_slash("comment\r"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_comment_double_slash("comment2 "),
+                               TestUtils.token_line(2),
+                               TestUtils.token_comment_double_slash("comment3 "),
+                               TestUtils.token_line(3),
+                               TestUtils.token_line(4),
+                               TestUtils.token_comment_double_slash("comment4")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize hash comments on multiple lines with whitespace" do
+      let(:test_string) { " #comment\r\n #comment2 \n#comment3 \n\n#comment4" }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_comment_hash("comment\r"),
+                               TestUtils.token_line(1),
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_comment_hash("comment2 "),
+                               TestUtils.token_line(2),
+                               TestUtils.token_comment_hash("comment3 "),
+                               TestUtils.token_line(3),
+                               TestUtils.token_line(4),
+                               TestUtils.token_comment_hash("comment4")] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Brackets, braces
+  ####################
+  context "tokenizing brackets and braces" do
+    context "tokenize open curly braces" do
+      let(:test_string) { "{{" }
+      let(:expected_tokens) { [Tokens::OPEN_CURLY, Tokens::OPEN_CURLY] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize close curly braces" do
+      let(:test_string) { "}}" }
+      let(:expected_tokens) { [Tokens::CLOSE_CURLY, Tokens::CLOSE_CURLY] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize open and close curly braces" do
+      let(:test_string) { "{}" }
+      let(:expected_tokens) { [Tokens::OPEN_CURLY, Tokens::CLOSE_CURLY] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize open and close curly braces" do
+      let(:test_string) { "{}" }
+      let(:expected_tokens) { [Tokens::OPEN_CURLY, Tokens::CLOSE_CURLY] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize open square brackets" do
+      let(:test_string) { "[[" }
+      let(:expected_tokens) { [Tokens::OPEN_SQUARE, Tokens::OPEN_SQUARE] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize close square brackets" do
+      let(:test_string) { "]]" }
+      let(:expected_tokens) { [Tokens::CLOSE_SQUARE, Tokens::CLOSE_SQUARE] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize open and close square brackets" do
+      let(:test_string) { "[]" }
+      let(:expected_tokens) { [Tokens::OPEN_SQUARE, Tokens::CLOSE_SQUARE] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # comma, colon, equals, plus equals
+  ####################
+  context "tokenizing comma, colon, equals, and plus equals" do
+    context "tokenize comma" do
+      let(:test_string) { "," }
+      let(:expected_tokens) { [Tokens::COMMA] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize colon" do
+      let(:test_string) { ":" }
+      let(:expected_tokens) { [Tokens::COLON] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize equals" do
+      let(:test_string) { "=" }
+      let(:expected_tokens) { [Tokens::EQUALS] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize plus equals" do
+      let(:test_string) { "+=" }
+      let(:expected_tokens) { [Tokens::PLUS_EQUALS] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize comma, colon, plus equals, and equals together" do
+      let(:test_string) { "=:,+=" }
+      let(:expected_tokens) { [Tokens::EQUALS,
+                               Tokens::COLON,
+                               Tokens::COMMA,
+                               Tokens::PLUS_EQUALS] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Substitutions
+  ####################
+  context "tokenizing substitutions" do
+    context "tokenize substitution" do
+      let(:test_string) { "${a.b}" }
+      let(:expected_tokens) { [TestUtils.token_substitution(TestUtils.token_unquoted("a.b"))] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize optional substitution" do
+      let(:test_string) { "${?x.y}" }
+      let(:expected_tokens) { [TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y"))] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize key substitution" do
+      let(:test_string) { '${"c.d"}' }
+      let(:expected_tokens) { [TestUtils.token_key_substitution("c.d")] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Unicode and escape characters
+  ####################
+  context "tokenizing unicode and escape characters" do
+    context "tokenize unicode infinity symbol" do
+      let(:test_string) { '"\u221E"' }
+      let(:expected_tokens) { [TestUtils.token_string("\u{221E}")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize null byte" do
+      let(:test_string) { ' "\u0000" ' }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("\u0000"),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize various escape codes" do
+      let(:test_string) { ' "\"\\\/\b\f\n\r\t" ' }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("\"\\/\b\f\n\r\t"),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize unicode F" do
+      let(:test_string) { ' "\u0046" ' }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("F"),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize two unicode Fs" do
+      let(:test_string) { ' "\u0046\u0046" ' }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("FF"),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+  end
+
+  ####################
+  # Reserved Characters
+  ####################
+  context "Finding problems with using reserved characters" do
+    context "problem with reserved character +" do
+      let(:test_string) { "+" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character `" do
+      let(:test_string) { "`" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character ^" do
+      let(:test_string) { "^" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character ?" do
+      let(:test_string) { "?" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character !" do
+      let(:test_string) { "!" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character @" do
+      let(:test_string) { "@" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character *" do
+      let(:test_string) { "*" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character &" do
+      let(:test_string) { "&" }
+      include_examples "strings_with_problems"
+    end
+
+    context "problem with reserved character \\" do
+      let(:test_string) { "\\" }
+      include_examples "strings_with_problems"
+    end
+  end
+
+  ####################
+  # Combine all types
+  ####################
+  context "Tokenizing all types together" do
+    context "tokenize all types no spaces" do
+      let(:test_string) { ',:=}{][+="foo""""bar"""true3.14false42null${a.b}${?x.y}${"c.d"}' + "\n" }
+      let(:expected_tokens) { [Tokens::COMMA,
+                               Tokens::COLON,
+                               Tokens::EQUALS,
+                               Tokens::CLOSE_CURLY,
+                               Tokens::OPEN_CURLY,
+                               Tokens::CLOSE_SQUARE,
+                               Tokens::OPEN_SQUARE,
+                               Tokens::PLUS_EQUALS,
+                               TestUtils.token_string("foo"),
+                               TestUtils.token_string("bar"),
+                               TestUtils.token_true,
+                               TestUtils.token_double(3.14),
+                               TestUtils.token_false,
+                               TestUtils.token_int(42),
+                               TestUtils.token_null,
+                               TestUtils.token_substitution(TestUtils.token_unquoted("a.b")),
+                               TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y")),
+                               TestUtils.token_key_substitution("c.d"),
+                               TestUtils.token_line(1)] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize all types single spaces" do
+      let(:test_string) { ' , : = } { ] [ += "foo" """bar""" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} ' + "\n " }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               Tokens::COMMA,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::COLON,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::EQUALS,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::CLOSE_CURLY,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::OPEN_CURLY,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::CLOSE_SQUARE,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::OPEN_SQUARE,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::PLUS_EQUALS,
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("foo"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_string("bar"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_int(42),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_true,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_double(3.14),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_false,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_null,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_substitution(TestUtils.token_unquoted("a.b")),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y")),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_key_substitution("c.d"),
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_line(1),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+
+    context "tokenize all types multiple spaces" do
+      let(:test_string) { ' , : = } { ] [ += "foo" """bar""" 42 true 3.14 false null ${a.b} ${?x.y} ${"c.d"} ' + "\n " }
+      let(:expected_tokens) { [TestUtils.token_whitespace(" "),
+                               Tokens::COMMA,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::COLON,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::EQUALS,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::CLOSE_CURLY,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::OPEN_CURLY,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::CLOSE_SQUARE,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::OPEN_SQUARE,
+                               TestUtils.token_whitespace(" "),
+                               Tokens::PLUS_EQUALS,
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_string("foo"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_string("bar"),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_int(42),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_true,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_double(3.14),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_false,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_null,
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_substitution(TestUtils.token_unquoted("a.b")),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_optional_substitution(TestUtils.token_unquoted("x.y")),
+                               TestUtils.token_unquoted(" "),
+                               TestUtils.token_key_substitution("c.d"),
+                               TestUtils.token_whitespace(" "),
+                               TestUtils.token_line(1),
+                               TestUtils.token_whitespace(" ")] }
+
+      include_examples "token_matching"
+    end
+  end
+end
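At the user level, the token behavior pinned down above (comments in both syntaxes, int vs. double literals, unquoted text that keeps internal whitespace) is what the parser builds on. A small hedged example; the keys are made up, and `transform_to_string` is assumed to behave on simple values as it does in the simple_config spec above:

```ruby
require 'hocon/config_factory'

conf = Hocon::ConfigFactory.parse_string(<<-HOCON)
  // double-slash comments tokenize as comments, not values
  answer = 42        # hash comments work too; 42 tokenizes as an int
  pi = 3.14          // 3.14 tokenizes as a double
  phrase = foo bar   # unquoted tokens; internal spaces are kept
HOCON

conf.get_value("answer").transform_to_string  # => "42"
conf.get_value("phrase").transform_to_string  # => "foo bar"
```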