puppet-lint 2.5.2 → 3.0.0
This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +522 -0
- data/lib/puppet-lint/bin.rb +71 -6
- data/lib/puppet-lint/checkplugin.rb +43 -9
- data/lib/puppet-lint/checks.rb +16 -16
- data/lib/puppet-lint/configuration.rb +134 -134
- data/lib/puppet-lint/data.rb +28 -28
- data/lib/puppet-lint/lexer/string_slurper.rb +138 -140
- data/lib/puppet-lint/lexer/token.rb +188 -190
- data/lib/puppet-lint/lexer.rb +416 -417
- data/lib/puppet-lint/monkeypatches.rb +1 -1
- data/lib/puppet-lint/optparser.rb +5 -1
- data/lib/puppet-lint/plugins/check_classes/arrow_on_right_operand_line.rb +6 -4
- data/lib/puppet-lint/plugins/check_classes/autoloader_layout.rb +5 -3
- data/lib/puppet-lint/plugins/check_classes/class_inherits_from_params_class.rb +6 -4
- data/lib/puppet-lint/plugins/check_classes/code_on_top_scope.rb +5 -3
- data/lib/puppet-lint/plugins/check_classes/inherits_across_namespaces.rb +5 -3
- data/lib/puppet-lint/plugins/check_classes/names_containing_dash.rb +5 -3
- data/lib/puppet-lint/plugins/check_classes/names_containing_uppercase.rb +7 -5
- data/lib/puppet-lint/plugins/check_classes/nested_classes_or_defines.rb +5 -3
- data/lib/puppet-lint/plugins/check_classes/parameter_order.rb +7 -4
- data/lib/puppet-lint/plugins/check_classes/right_to_left_relationship.rb +5 -3
- data/lib/puppet-lint/plugins/check_classes/variable_scope.rb +15 -13
- data/lib/puppet-lint/plugins/check_comments/slash_comments.rb +9 -7
- data/lib/puppet-lint/plugins/check_comments/star_comments.rb +10 -8
- data/lib/puppet-lint/plugins/check_conditionals/case_without_default.rb +6 -4
- data/lib/puppet-lint/plugins/check_conditionals/selector_inside_resource.rb +5 -3
- data/lib/puppet-lint/plugins/check_documentation/documentation.rb +7 -3
- data/lib/puppet-lint/plugins/check_nodes/unquoted_node_name.rb +15 -11
- data/lib/puppet-lint/plugins/check_resources/duplicate_params.rb +5 -3
- data/lib/puppet-lint/plugins/check_resources/ensure_first_param.rb +8 -5
- data/lib/puppet-lint/plugins/check_resources/ensure_not_symlink_target.rb +11 -8
- data/lib/puppet-lint/plugins/check_resources/file_mode.rb +14 -9
- data/lib/puppet-lint/plugins/check_resources/unquoted_file_mode.rb +11 -6
- data/lib/puppet-lint/plugins/check_resources/unquoted_resource_title.rb +6 -4
- data/lib/puppet-lint/plugins/check_strings/double_quoted_strings.rb +12 -7
- data/lib/puppet-lint/plugins/check_strings/only_variable_string.rb +8 -6
- data/lib/puppet-lint/plugins/check_strings/puppet_url_without_modules.rb +14 -8
- data/lib/puppet-lint/plugins/check_strings/quoted_booleans.rb +11 -7
- data/lib/puppet-lint/plugins/check_strings/single_quote_string_with_variables.rb +11 -6
- data/lib/puppet-lint/plugins/check_strings/variables_not_enclosed.rb +12 -8
- data/lib/puppet-lint/plugins/check_variables/variable_contains_dash.rb +11 -7
- data/lib/puppet-lint/plugins/check_variables/variable_is_lowercase.rb +11 -7
- data/lib/puppet-lint/plugins/check_whitespace/140chars.rb +3 -8
- data/lib/puppet-lint/plugins/check_whitespace/2sp_soft_tabs.rb +10 -8
- data/lib/puppet-lint/plugins/check_whitespace/80chars.rb +3 -8
- data/lib/puppet-lint/plugins/check_whitespace/arrow_alignment.rb +10 -8
- data/lib/puppet-lint/plugins/check_whitespace/hard_tabs.rb +11 -7
- data/lib/puppet-lint/plugins/check_whitespace/line_length.rb +29 -0
- data/lib/puppet-lint/plugins/check_whitespace/trailing_whitespace.rb +13 -7
- data/lib/puppet-lint/plugins.rb +63 -61
- data/lib/puppet-lint/report/github.rb +17 -0
- data/lib/puppet-lint/report/sarif_template.json +63 -0
- data/lib/puppet-lint/tasks/puppet-lint.rb +84 -83
- data/lib/puppet-lint/tasks/release_test.rb +4 -1
- data/lib/puppet-lint/version.rb +1 -1
- data/lib/puppet-lint.rb +27 -12
- data/spec/acceptance/puppet_lint_spec.rb +46 -0
- data/spec/spec_helper.rb +92 -91
- data/spec/spec_helper_acceptance.rb +6 -0
- data/spec/spec_helper_acceptance_local.rb +38 -0
- data/spec/{puppet-lint → unit/puppet-lint}/bin_spec.rb +79 -35
- data/spec/{puppet-lint → unit/puppet-lint}/checks_spec.rb +36 -36
- data/spec/unit/puppet-lint/configuration_spec.rb +88 -0
- data/spec/{puppet-lint → unit/puppet-lint}/data_spec.rb +6 -3
- data/spec/{puppet-lint → unit/puppet-lint}/ignore_overrides_spec.rb +17 -17
- data/spec/{puppet-lint → unit/puppet-lint}/lexer/string_slurper_spec.rb +128 -128
- data/spec/{puppet-lint → unit/puppet-lint}/lexer/token_spec.rb +1 -1
- data/spec/{puppet-lint → unit/puppet-lint}/lexer_spec.rb +653 -671
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/arrow_on_right_operand_line_spec.rb +16 -16
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/autoloader_layout_spec.rb +13 -13
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/class_inherits_from_params_class_spec.rb +3 -3
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/code_on_top_scope_spec.rb +4 -4
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/inherits_across_namespaces_spec.rb +4 -4
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/name_contains_uppercase_spec.rb +10 -10
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/names_containing_dash_spec.rb +7 -7
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/nested_classes_or_defines_spec.rb +7 -7
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/parameter_order_spec.rb +9 -9
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/right_to_left_relationship_spec.rb +3 -3
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/variable_scope_spec.rb +25 -25
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_comments/slash_comments_spec.rb +7 -7
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_comments/star_comments_spec.rb +13 -13
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_conditionals/case_without_default_spec.rb +10 -10
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_conditionals/selector_inside_resource_spec.rb +3 -3
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_documentation/documentation_spec.rb +8 -8
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_nodes/unquoted_node_name_spec.rb +24 -24
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/duplicate_params_spec.rb +9 -9
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/ensure_first_param_spec.rb +19 -19
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/ensure_not_symlink_target_spec.rb +10 -10
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/file_mode_spec.rb +40 -40
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/unquoted_file_mode_spec.rb +20 -20
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/unquoted_resource_title_spec.rb +24 -24
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/double_quoted_strings_spec.rb +27 -27
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/only_variable_string_spec.rb +18 -18
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/puppet_url_without_modules_spec.rb +9 -9
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/quoted_booleans_spec.rb +22 -22
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/single_quote_string_with_variables_spec.rb +2 -2
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/variables_not_enclosed_spec.rb +21 -21
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_variables/variable_contains_dash_spec.rb +6 -6
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_variables/variable_is_lowercase_spec.rb +7 -7
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/140chars_spec.rb +5 -5
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/2sp_soft_tabs_spec.rb +2 -2
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/80chars_spec.rb +6 -6
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/arrow_alignment_spec.rb +127 -127
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/hard_tabs_spec.rb +7 -7
- data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/trailing_whitespace_spec.rb +15 -15
- data/spec/unit/puppet-lint/puppet-lint_spec.rb +18 -0
- metadata +63 -119
- data/CHANGELOG.md +0 -33
- data/HISTORY.md +0 -1130
- data/spec/puppet-lint/configuration_spec.rb +0 -66
- data/spec/puppet-lint_spec.rb +0 -16
@@ -2,73 +2,74 @@
|
|
2
2
|
|
3
3
|
require 'spec_helper'
|
4
4
|
|
5
|
-
describe PuppetLint::Lexer do
|
6
|
-
|
7
|
-
|
5
|
+
describe PuppetLint::Lexer do
|
6
|
+
subject(:lexer) do
|
7
|
+
described_class.new
|
8
8
|
end
|
9
9
|
|
10
10
|
context 'invalid code' do
|
11
|
-
it '
|
12
|
-
expect {
|
11
|
+
it 'borks' do
|
12
|
+
expect { lexer.tokenise('^') }.to raise_error(PuppetLint::LexerError)
|
13
13
|
end
|
14
14
|
end
|
15
15
|
|
16
16
|
context '#new_token' do
|
17
|
-
it '
|
18
|
-
token =
|
17
|
+
it 'calculates the line number for an empty string' do
|
18
|
+
token = lexer.new_token(:TEST, 'test')
|
19
19
|
expect(token.line).to eq(1)
|
20
20
|
end
|
21
21
|
|
22
|
-
it '
|
23
|
-
|
24
|
-
token =
|
22
|
+
it 'gets correct line number after double quoted multi line string' do
|
23
|
+
lexer.new_token(:STRING, "test\ntest")
|
24
|
+
token = lexer.new_token(:TEST, 'test')
|
25
25
|
expect(token.line).to eq(2)
|
26
26
|
end
|
27
27
|
|
28
|
-
it '
|
29
|
-
|
30
|
-
token =
|
28
|
+
it 'gets correct line number after a multi line comment' do
|
29
|
+
lexer.new_token(:MLCOMMENT, "test\ntest", raw: "/*test\ntest*/")
|
30
|
+
token = lexer.new_token(:TEST, 'test')
|
31
31
|
expect(token.line).to eq(2)
|
32
32
|
end
|
33
33
|
|
34
|
-
it '
|
35
|
-
|
36
|
-
token =
|
34
|
+
it 'calculates the line number for a multi line string' do
|
35
|
+
lexer.new_token(:SSTRING, "test\ntest")
|
36
|
+
token = lexer.new_token(:TEST, 'test')
|
37
37
|
expect(token.line).to eq(2)
|
38
38
|
end
|
39
39
|
|
40
|
-
it '
|
41
|
-
|
42
|
-
token =
|
40
|
+
it 'calculates line number for string that ends with newline' do
|
41
|
+
lexer.new_token(:SSTRING, "test\n")
|
42
|
+
token = lexer.new_token(:TEST, 'test')
|
43
43
|
expect(token.line).to eq(2)
|
44
44
|
end
|
45
45
|
|
46
|
-
it '
|
47
|
-
token =
|
46
|
+
it 'calculates the column number for an empty string' do
|
47
|
+
token = lexer.new_token(:TEST, 'test')
|
48
48
|
expect(token.column).to eq(1)
|
49
49
|
end
|
50
50
|
|
51
|
-
it '
|
52
|
-
|
53
|
-
token =
|
51
|
+
it 'calculates the column number for a single line string' do
|
52
|
+
lexer.new_token(:SSTRING, 'this is a test')
|
53
|
+
token = lexer.new_token(:TEST, 'test')
|
54
54
|
expect(token.column).to eq(17)
|
55
55
|
end
|
56
56
|
|
57
|
-
it '
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
token =
|
57
|
+
it 'calculates the column number for a multi line string' do
|
58
|
+
lexer.instance_variable_set('@line_no', 4)
|
59
|
+
lexer.instance_variable_set('@column', 5)
|
60
|
+
lexer.new_token(:SSTRING, "test\ntest")
|
61
|
+
token = lexer.new_token(:TEST, 'test')
|
62
62
|
expect(token.column).to eq(6)
|
63
63
|
end
|
64
64
|
end
|
65
65
|
|
66
66
|
context '#process_string_segments' do
|
67
|
-
subject(:tokens) {
|
68
|
-
|
67
|
+
subject(:tokens) { lexer.tokens }
|
68
|
+
|
69
|
+
subject(:manifest) { lexer.tokens.map(&:to_manifest).join }
|
69
70
|
|
70
71
|
before(:each) do
|
71
|
-
|
72
|
+
lexer.process_string_segments(segments)
|
72
73
|
end
|
73
74
|
|
74
75
|
context 'an empty string segment' do
|
@@ -81,10 +82,10 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
81
82
|
it 'creates a :STRING token' do
|
82
83
|
expect(tokens).to have(1).token
|
83
84
|
expect(tokens[0]).to have_attributes(
|
84
|
-
:
|
85
|
-
:
|
86
|
-
:
|
87
|
-
:
|
85
|
+
type: :STRING,
|
86
|
+
value: '',
|
87
|
+
line: 1,
|
88
|
+
column: 1,
|
88
89
|
)
|
89
90
|
end
|
90
91
|
|
@@ -105,22 +106,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
105
106
|
it 'creates a tokenised string with an interpolated variable' do
|
106
107
|
expect(tokens).to have(3).tokens
|
107
108
|
expect(tokens[0]).to have_attributes(
|
108
|
-
:
|
109
|
-
:
|
110
|
-
:
|
111
|
-
:
|
109
|
+
type: :DQPRE,
|
110
|
+
value: '',
|
111
|
+
line: 1,
|
112
|
+
column: 1,
|
112
113
|
)
|
113
114
|
expect(tokens[1]).to have_attributes(
|
114
|
-
:
|
115
|
-
:
|
116
|
-
:
|
117
|
-
:
|
115
|
+
type: :VARIABLE,
|
116
|
+
value: 'foo',
|
117
|
+
line: 1,
|
118
|
+
column: 4,
|
118
119
|
)
|
119
120
|
expect(tokens[2]).to have_attributes(
|
120
|
-
:
|
121
|
-
:
|
122
|
-
:
|
123
|
-
:
|
121
|
+
type: :DQPOST,
|
122
|
+
value: 'bar',
|
123
|
+
line: 1,
|
124
|
+
column: 7,
|
124
125
|
)
|
125
126
|
end
|
126
127
|
|
@@ -141,22 +142,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
141
142
|
it 'creates a tokenised string with an interpolated variable' do
|
142
143
|
expect(tokens).to have(3).tokens
|
143
144
|
expect(tokens[0]).to have_attributes(
|
144
|
-
:
|
145
|
-
:
|
146
|
-
:
|
147
|
-
:
|
145
|
+
type: :DQPRE,
|
146
|
+
value: 'foo',
|
147
|
+
line: 1,
|
148
|
+
column: 1,
|
148
149
|
)
|
149
150
|
expect(tokens[1]).to have_attributes(
|
150
|
-
:
|
151
|
-
:
|
152
|
-
:
|
153
|
-
:
|
151
|
+
type: :VARIABLE,
|
152
|
+
value: 'bar',
|
153
|
+
line: 1,
|
154
|
+
column: 7,
|
154
155
|
)
|
155
156
|
expect(tokens[2]).to have_attributes(
|
156
|
-
:
|
157
|
-
:
|
158
|
-
:
|
159
|
-
:
|
157
|
+
type: :DQPOST,
|
158
|
+
value: 'baz',
|
159
|
+
line: 1,
|
160
|
+
column: 10,
|
160
161
|
)
|
161
162
|
end
|
162
163
|
|
@@ -180,34 +181,34 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
180
181
|
expect(tokens).to have(5).tokens
|
181
182
|
|
182
183
|
expect(tokens[0]).to have_attributes(
|
183
|
-
:
|
184
|
-
:
|
185
|
-
:
|
186
|
-
:
|
184
|
+
type: :DQPRE,
|
185
|
+
value: 'foo',
|
186
|
+
line: 1,
|
187
|
+
column: 1,
|
187
188
|
)
|
188
189
|
expect(tokens[1]).to have_attributes(
|
189
|
-
:
|
190
|
-
:
|
191
|
-
:
|
192
|
-
:
|
190
|
+
type: :VARIABLE,
|
191
|
+
value: 'bar',
|
192
|
+
line: 1,
|
193
|
+
column: 7,
|
193
194
|
)
|
194
195
|
expect(tokens[2]).to have_attributes(
|
195
|
-
:
|
196
|
-
:
|
197
|
-
:
|
198
|
-
:
|
196
|
+
type: :DQMID,
|
197
|
+
value: 'baz',
|
198
|
+
line: 1,
|
199
|
+
column: 10,
|
199
200
|
)
|
200
201
|
expect(tokens[3]).to have_attributes(
|
201
|
-
:
|
202
|
-
:
|
203
|
-
:
|
204
|
-
:
|
202
|
+
type: :VARIABLE,
|
203
|
+
value: 'gronk',
|
204
|
+
line: 1,
|
205
|
+
column: 16,
|
205
206
|
)
|
206
207
|
expect(tokens[4]).to have_attributes(
|
207
|
-
:
|
208
|
-
:
|
209
|
-
:
|
210
|
-
:
|
208
|
+
type: :DQPOST,
|
209
|
+
value: 'meh',
|
210
|
+
line: 1,
|
211
|
+
column: 21,
|
211
212
|
)
|
212
213
|
end
|
213
214
|
|
@@ -229,22 +230,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
229
230
|
expect(tokens).to have(3).tokens
|
230
231
|
|
231
232
|
expect(tokens[0]).to have_attributes(
|
232
|
-
:
|
233
|
-
:
|
234
|
-
:
|
235
|
-
:
|
233
|
+
type: :DQPRE,
|
234
|
+
value: '',
|
235
|
+
line: 1,
|
236
|
+
column: 1,
|
236
237
|
)
|
237
238
|
expect(tokens[1]).to have_attributes(
|
238
|
-
:
|
239
|
-
:
|
240
|
-
:
|
241
|
-
:
|
239
|
+
type: :VARIABLE,
|
240
|
+
value: 'foo',
|
241
|
+
line: 1,
|
242
|
+
column: 4,
|
242
243
|
)
|
243
244
|
expect(tokens[2]).to have_attributes(
|
244
|
-
:
|
245
|
-
:
|
246
|
-
:
|
247
|
-
:
|
245
|
+
type: :DQPOST,
|
246
|
+
value: '',
|
247
|
+
line: 1,
|
248
|
+
column: 7,
|
248
249
|
)
|
249
250
|
end
|
250
251
|
|
@@ -254,7 +255,7 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
254
255
|
end
|
255
256
|
|
256
257
|
context 'treats a variable named the same as the keyword as a variable' do
|
257
|
-
PuppetLint::Lexer::KEYWORDS.
|
258
|
+
PuppetLint::Lexer::KEYWORDS.each_key do |keyword|
|
258
259
|
context "for '#{keyword}'" do
|
259
260
|
let(:segments) do
|
260
261
|
[
|
@@ -268,22 +269,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
268
269
|
expect(tokens).to have(3).tokens
|
269
270
|
|
270
271
|
expect(tokens[0]).to have_attributes(
|
271
|
-
:
|
272
|
-
:
|
273
|
-
:
|
274
|
-
:
|
272
|
+
type: :DQPRE,
|
273
|
+
value: '',
|
274
|
+
line: 1,
|
275
|
+
column: 1,
|
275
276
|
)
|
276
277
|
expect(tokens[1]).to have_attributes(
|
277
|
-
:
|
278
|
-
:
|
279
|
-
:
|
280
|
-
:
|
278
|
+
type: :VARIABLE,
|
279
|
+
value: keyword,
|
280
|
+
line: 1,
|
281
|
+
column: 4,
|
281
282
|
)
|
282
283
|
expect(tokens[2]).to have_attributes(
|
283
|
-
:
|
284
|
-
:
|
285
|
-
:
|
286
|
-
:
|
284
|
+
type: :DQPOST,
|
285
|
+
value: '',
|
286
|
+
line: 1,
|
287
|
+
column: keyword.size + 4,
|
287
288
|
)
|
288
289
|
end
|
289
290
|
|
@@ -307,22 +308,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
307
308
|
expect(tokens).to have(3).tokens
|
308
309
|
|
309
310
|
expect(tokens[0]).to have_attributes(
|
310
|
-
:
|
311
|
-
:
|
312
|
-
:
|
313
|
-
:
|
311
|
+
type: :DQPRE,
|
312
|
+
value: '',
|
313
|
+
line: 1,
|
314
|
+
column: 1,
|
314
315
|
)
|
315
316
|
expect(tokens[1]).to have_attributes(
|
316
|
-
:
|
317
|
-
:
|
318
|
-
:
|
319
|
-
:
|
317
|
+
type: :VARIABLE,
|
318
|
+
value: 'bar',
|
319
|
+
line: 1,
|
320
|
+
column: 4,
|
320
321
|
)
|
321
322
|
expect(tokens[2]).to have_attributes(
|
322
|
-
:
|
323
|
-
:
|
324
|
-
:
|
325
|
-
:
|
323
|
+
type: :DQPOST,
|
324
|
+
value: '',
|
325
|
+
line: 1,
|
326
|
+
column: 8,
|
326
327
|
)
|
327
328
|
end
|
328
329
|
|
@@ -344,58 +345,58 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
344
345
|
expect(tokens).to have(9).tokens
|
345
346
|
|
346
347
|
expect(tokens[0]).to have_attributes(
|
347
|
-
:
|
348
|
-
:
|
349
|
-
:
|
350
|
-
:
|
348
|
+
type: :DQPRE,
|
349
|
+
value: '',
|
350
|
+
line: 1,
|
351
|
+
column: 1,
|
351
352
|
)
|
352
353
|
expect(tokens[1]).to have_attributes(
|
353
|
-
:
|
354
|
-
:
|
355
|
-
:
|
356
|
-
:
|
354
|
+
type: :VARIABLE,
|
355
|
+
value: 'foo',
|
356
|
+
line: 1,
|
357
|
+
column: 4,
|
357
358
|
)
|
358
359
|
expect(tokens[2]).to have_attributes(
|
359
|
-
:
|
360
|
-
:
|
361
|
-
:
|
362
|
-
:
|
360
|
+
type: :LBRACK,
|
361
|
+
value: '[',
|
362
|
+
line: 1,
|
363
|
+
column: 7,
|
363
364
|
)
|
364
365
|
expect(tokens[3]).to have_attributes(
|
365
|
-
:
|
366
|
-
:
|
367
|
-
:
|
368
|
-
:
|
366
|
+
type: :NAME,
|
367
|
+
value: 'bar',
|
368
|
+
line: 1,
|
369
|
+
column: 8,
|
369
370
|
)
|
370
371
|
expect(tokens[4]).to have_attributes(
|
371
|
-
:
|
372
|
-
:
|
373
|
-
:
|
374
|
-
:
|
372
|
+
type: :RBRACK,
|
373
|
+
value: ']',
|
374
|
+
line: 1,
|
375
|
+
column: 11,
|
375
376
|
)
|
376
377
|
expect(tokens[5]).to have_attributes(
|
377
|
-
:
|
378
|
-
:
|
379
|
-
:
|
380
|
-
:
|
378
|
+
type: :LBRACK,
|
379
|
+
value: '[',
|
380
|
+
line: 1,
|
381
|
+
column: 12,
|
381
382
|
)
|
382
383
|
expect(tokens[6]).to have_attributes(
|
383
|
-
:
|
384
|
-
:
|
385
|
-
:
|
386
|
-
:
|
384
|
+
type: :NAME,
|
385
|
+
value: 'baz',
|
386
|
+
line: 1,
|
387
|
+
column: 13,
|
387
388
|
)
|
388
389
|
expect(tokens[7]).to have_attributes(
|
389
|
-
:
|
390
|
-
:
|
391
|
-
:
|
392
|
-
:
|
390
|
+
type: :RBRACK,
|
391
|
+
value: ']',
|
392
|
+
line: 1,
|
393
|
+
column: 16,
|
393
394
|
)
|
394
395
|
expect(tokens[8]).to have_attributes(
|
395
|
-
:
|
396
|
-
:
|
397
|
-
:
|
398
|
-
:
|
396
|
+
type: :DQPOST,
|
397
|
+
value: '',
|
398
|
+
line: 1,
|
399
|
+
column: 17,
|
399
400
|
)
|
400
401
|
end
|
401
402
|
|
@@ -419,34 +420,34 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
419
420
|
expect(tokens).to have(5).tokens
|
420
421
|
|
421
422
|
expect(tokens[0]).to have_attributes(
|
422
|
-
:
|
423
|
-
:
|
424
|
-
:
|
425
|
-
:
|
423
|
+
type: :DQPRE,
|
424
|
+
value: '',
|
425
|
+
line: 1,
|
426
|
+
column: 1,
|
426
427
|
)
|
427
428
|
expect(tokens[1]).to have_attributes(
|
428
|
-
:
|
429
|
-
:
|
430
|
-
:
|
431
|
-
:
|
429
|
+
type: :VARIABLE,
|
430
|
+
value: 'foo',
|
431
|
+
line: 1,
|
432
|
+
column: 4,
|
432
433
|
)
|
433
434
|
expect(tokens[2]).to have_attributes(
|
434
|
-
:
|
435
|
-
:
|
436
|
-
:
|
437
|
-
:
|
435
|
+
type: :DQMID,
|
436
|
+
value: '',
|
437
|
+
line: 1,
|
438
|
+
column: 7,
|
438
439
|
)
|
439
440
|
expect(tokens[3]).to have_attributes(
|
440
|
-
:
|
441
|
-
:
|
442
|
-
:
|
443
|
-
:
|
441
|
+
type: :VARIABLE,
|
442
|
+
value: 'bar',
|
443
|
+
line: 1,
|
444
|
+
column: 10,
|
444
445
|
)
|
445
446
|
expect(tokens[4]).to have_attributes(
|
446
|
-
:
|
447
|
-
:
|
448
|
-
:
|
449
|
-
:
|
447
|
+
type: :DQPOST,
|
448
|
+
value: '',
|
449
|
+
line: 1,
|
450
|
+
column: 13,
|
450
451
|
)
|
451
452
|
end
|
452
453
|
|
@@ -468,22 +469,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
468
469
|
expect(tokens).to have(3).tokens
|
469
470
|
|
470
471
|
expect(tokens[0]).to have_attributes(
|
471
|
-
:
|
472
|
-
:
|
473
|
-
:
|
474
|
-
:
|
472
|
+
type: :DQPRE,
|
473
|
+
value: '',
|
474
|
+
line: 1,
|
475
|
+
column: 1,
|
475
476
|
)
|
476
477
|
expect(tokens[1]).to have_attributes(
|
477
|
-
:
|
478
|
-
:
|
479
|
-
:
|
480
|
-
:
|
478
|
+
type: :UNENC_VARIABLE,
|
479
|
+
value: 'foo',
|
480
|
+
line: 1,
|
481
|
+
column: 2,
|
481
482
|
)
|
482
483
|
expect(tokens[2]).to have_attributes(
|
483
|
-
:
|
484
|
-
:
|
485
|
-
:
|
486
|
-
:
|
484
|
+
type: :DQPOST,
|
485
|
+
value: '',
|
486
|
+
line: 1,
|
487
|
+
column: 6,
|
487
488
|
)
|
488
489
|
end
|
489
490
|
|
@@ -505,22 +506,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
505
506
|
expect(tokens).to have(3).tokens
|
506
507
|
|
507
508
|
expect(tokens[0]).to have_attributes(
|
508
|
-
:
|
509
|
-
:
|
510
|
-
:
|
511
|
-
:
|
509
|
+
type: :DQPRE,
|
510
|
+
value: 'string with ',
|
511
|
+
line: 1,
|
512
|
+
column: 1,
|
512
513
|
)
|
513
514
|
expect(tokens[1]).to have_attributes(
|
514
|
-
:
|
515
|
-
:
|
516
|
-
:
|
517
|
-
:
|
515
|
+
type: :SSTRING,
|
516
|
+
value: 'a nested single quoted string',
|
517
|
+
line: 1,
|
518
|
+
column: 16,
|
518
519
|
)
|
519
520
|
expect(tokens[2]).to have_attributes(
|
520
|
-
:
|
521
|
-
:
|
522
|
-
:
|
523
|
-
:
|
521
|
+
type: :DQPOST,
|
522
|
+
value: ' inside it',
|
523
|
+
line: 1,
|
524
|
+
column: 47,
|
524
525
|
)
|
525
526
|
end
|
526
527
|
|
@@ -542,58 +543,58 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
542
543
|
expect(tokens).to have(9).tokens
|
543
544
|
|
544
545
|
expect(tokens[0]).to have_attributes(
|
545
|
-
:
|
546
|
-
:
|
547
|
-
:
|
548
|
-
:
|
546
|
+
type: :DQPRE,
|
547
|
+
value: 'string with ',
|
548
|
+
line: 1,
|
549
|
+
column: 1,
|
549
550
|
)
|
550
551
|
expect(tokens[1]).to have_attributes(
|
551
|
-
:
|
552
|
-
:
|
553
|
-
:
|
554
|
-
:
|
552
|
+
type: :LPAREN,
|
553
|
+
value: '(',
|
554
|
+
line: 1,
|
555
|
+
column: 16,
|
555
556
|
)
|
556
557
|
expect(tokens[2]).to have_attributes(
|
557
|
-
:
|
558
|
-
:
|
559
|
-
:
|
560
|
-
:
|
558
|
+
type: :NUMBER,
|
559
|
+
value: '3',
|
560
|
+
line: 1,
|
561
|
+
column: 17,
|
561
562
|
)
|
562
563
|
expect(tokens[3]).to have_attributes(
|
563
|
-
:
|
564
|
-
:
|
565
|
-
:
|
566
|
-
:
|
564
|
+
type: :PLUS,
|
565
|
+
value: '+',
|
566
|
+
line: 1,
|
567
|
+
column: 18,
|
567
568
|
)
|
568
569
|
expect(tokens[4]).to have_attributes(
|
569
|
-
:
|
570
|
-
:
|
571
|
-
:
|
572
|
-
:
|
570
|
+
type: :NUMBER,
|
571
|
+
value: '5',
|
572
|
+
line: 1,
|
573
|
+
column: 19,
|
573
574
|
)
|
574
575
|
expect(tokens[5]).to have_attributes(
|
575
|
-
:
|
576
|
-
:
|
577
|
-
:
|
578
|
-
:
|
576
|
+
type: :RPAREN,
|
577
|
+
value: ')',
|
578
|
+
line: 1,
|
579
|
+
column: 20,
|
579
580
|
)
|
580
581
|
expect(tokens[6]).to have_attributes(
|
581
|
-
:
|
582
|
-
:
|
583
|
-
:
|
584
|
-
:
|
582
|
+
type: :DIV,
|
583
|
+
value: '/',
|
584
|
+
line: 1,
|
585
|
+
column: 21,
|
585
586
|
)
|
586
587
|
expect(tokens[7]).to have_attributes(
|
587
|
-
:
|
588
|
-
:
|
589
|
-
:
|
590
|
-
:
|
588
|
+
type: :NUMBER,
|
589
|
+
value: '4',
|
590
|
+
line: 1,
|
591
|
+
column: 22,
|
591
592
|
)
|
592
593
|
expect(tokens[8]).to have_attributes(
|
593
|
-
:
|
594
|
-
:
|
595
|
-
:
|
596
|
-
:
|
594
|
+
type: :DQPOST,
|
595
|
+
value: ' nested math',
|
596
|
+
line: 1,
|
597
|
+
column: 23,
|
597
598
|
)
|
598
599
|
end
|
599
600
|
|
@@ -615,52 +616,52 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
615
616
|
expect(tokens).to have(8).tokens
|
616
617
|
|
617
618
|
expect(tokens[0]).to have_attributes(
|
618
|
-
:
|
619
|
-
:
|
620
|
-
:
|
621
|
-
:
|
619
|
+
type: :DQPRE,
|
620
|
+
value: 'string with ',
|
621
|
+
line: 1,
|
622
|
+
column: 1,
|
622
623
|
)
|
623
624
|
expect(tokens[1]).to have_attributes(
|
624
|
-
:
|
625
|
-
:
|
626
|
-
:
|
627
|
-
:
|
625
|
+
type: :LBRACK,
|
626
|
+
value: '[',
|
627
|
+
line: 1,
|
628
|
+
column: 16,
|
628
629
|
)
|
629
630
|
expect(tokens[2]).to have_attributes(
|
630
|
-
:
|
631
|
-
:
|
632
|
-
:
|
633
|
-
:
|
631
|
+
type: :SSTRING,
|
632
|
+
value: 'an array ',
|
633
|
+
line: 1,
|
634
|
+
column: 17,
|
634
635
|
)
|
635
636
|
expect(tokens[3]).to have_attributes(
|
636
|
-
:
|
637
|
-
:
|
638
|
-
:
|
639
|
-
:
|
637
|
+
type: :COMMA,
|
638
|
+
value: ',',
|
639
|
+
line: 1,
|
640
|
+
column: 28,
|
640
641
|
)
|
641
642
|
expect(tokens[4]).to have_attributes(
|
642
|
-
:
|
643
|
-
:
|
644
|
-
:
|
645
|
-
:
|
643
|
+
type: :WHITESPACE,
|
644
|
+
value: ' ',
|
645
|
+
line: 1,
|
646
|
+
column: 29,
|
646
647
|
)
|
647
648
|
expect(tokens[5]).to have_attributes(
|
648
|
-
:
|
649
|
-
:
|
650
|
-
:
|
651
|
-
:
|
649
|
+
type: :VARIABLE,
|
650
|
+
value: 'v2',
|
651
|
+
line: 1,
|
652
|
+
column: 30,
|
652
653
|
)
|
653
654
|
expect(tokens[6]).to have_attributes(
|
654
|
-
:
|
655
|
-
:
|
656
|
-
:
|
657
|
-
:
|
655
|
+
type: :RBRACK,
|
656
|
+
value: ']',
|
657
|
+
line: 1,
|
658
|
+
column: 33,
|
658
659
|
)
|
659
660
|
expect(tokens[7]).to have_attributes(
|
660
|
-
:
|
661
|
-
:
|
662
|
-
:
|
663
|
-
:
|
661
|
+
type: :DQPOST,
|
662
|
+
value: ' in it',
|
663
|
+
line: 1,
|
664
|
+
column: 34,
|
664
665
|
)
|
665
666
|
end
|
666
667
|
|
@@ -684,34 +685,34 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
684
685
|
expect(tokens).to have(5).tokens
|
685
686
|
|
686
687
|
expect(tokens[0]).to have_attributes(
|
687
|
-
:
|
688
|
-
:
|
689
|
-
:
|
690
|
-
:
|
688
|
+
type: :DQPRE,
|
689
|
+
value: '',
|
690
|
+
line: 1,
|
691
|
+
column: 1,
|
691
692
|
)
|
692
693
|
expect(tokens[1]).to have_attributes(
|
693
|
-
:
|
694
|
-
:
|
695
|
-
:
|
696
|
-
:
|
694
|
+
type: :UNENC_VARIABLE,
|
695
|
+
value: 'foo',
|
696
|
+
line: 1,
|
697
|
+
column: 2,
|
697
698
|
)
|
698
699
|
expect(tokens[2]).to have_attributes(
|
699
|
-
:
|
700
|
-
:
|
701
|
-
:
|
702
|
-
:
|
700
|
+
type: :DQMID,
|
701
|
+
value: '',
|
702
|
+
line: 1,
|
703
|
+
column: 6,
|
703
704
|
)
|
704
705
|
expect(tokens[3]).to have_attributes(
|
705
|
-
:
|
706
|
-
:
|
707
|
-
:
|
708
|
-
:
|
706
|
+
type: :UNENC_VARIABLE,
|
707
|
+
value: 'bar',
|
708
|
+
line: 1,
|
709
|
+
column: 6,
|
709
710
|
)
|
710
711
|
expect(tokens[4]).to have_attributes(
|
711
|
-
:
|
712
|
-
:
|
713
|
-
:
|
714
|
-
:
|
712
|
+
type: :DQPOST,
|
713
|
+
value: '',
|
714
|
+
line: 1,
|
715
|
+
column: 10,
|
715
716
|
)
|
716
717
|
end
|
717
718
|
|
@@ -733,22 +734,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
733
734
|
expect(tokens).to have(3).tokens
|
734
735
|
|
735
736
|
expect(tokens[0]).to have_attributes(
|
736
|
-
:
|
737
|
-
:
|
738
|
-
:
|
739
|
-
:
|
737
|
+
type: :DQPRE,
|
738
|
+
value: 'foo',
|
739
|
+
line: 1,
|
740
|
+
column: 1,
|
740
741
|
)
|
741
742
|
expect(tokens[1]).to have_attributes(
|
742
|
-
:
|
743
|
-
:
|
744
|
-
:
|
745
|
-
:
|
743
|
+
type: :UNENC_VARIABLE,
|
744
|
+
value: 'bar',
|
745
|
+
line: 1,
|
746
|
+
column: 5,
|
746
747
|
)
|
747
748
|
expect(tokens[2]).to have_attributes(
|
748
|
-
:
|
749
|
-
:
|
750
|
-
:
|
751
|
-
:
|
749
|
+
type: :DQPOST,
|
750
|
+
value: '$',
|
751
|
+
line: 1,
|
752
|
+
column: 9,
|
752
753
|
)
|
753
754
|
end
|
754
755
|
|
@@ -772,94 +773,94 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
772
773
|
expect(tokens).to have(15).tokens
|
773
774
|
|
774
775
|
expect(tokens[0]).to have_attributes(
|
775
|
-
:
|
776
|
-
:
|
777
|
-
:
|
778
|
-
:
|
776
|
+
type: :DQPRE,
|
777
|
+
value: '',
|
778
|
+
line: 1,
|
779
|
+
column: 1,
|
779
780
|
)
|
780
781
|
expect(tokens[1]).to have_attributes(
|
781
|
-
:
|
782
|
-
:
|
783
|
-
:
|
784
|
-
:
|
782
|
+
type: :VARIABLE,
|
783
|
+
value: 'key',
|
784
|
+
line: 1,
|
785
|
+
column: 4,
|
785
786
|
)
|
786
787
|
expect(tokens[2]).to have_attributes(
|
787
|
-
:
|
788
|
-
:
|
789
|
-
:
|
790
|
-
:
|
788
|
+
type: :DQMID,
|
789
|
+
value: ' ',
|
790
|
+
line: 1,
|
791
|
+
column: 7,
|
791
792
|
)
|
792
793
|
expect(tokens[3]).to have_attributes(
|
793
|
-
:
|
794
|
-
:
|
795
|
-
:
|
796
|
-
:
|
794
|
+
type: :FUNCTION_NAME,
|
795
|
+
value: 'flatten',
|
796
|
+
line: 1,
|
797
|
+
column: 11,
|
797
798
|
)
|
798
799
|
expect(tokens[4]).to have_attributes(
|
799
|
-
:
|
800
|
-
:
|
801
|
-
:
|
802
|
-
:
|
800
|
+
type: :LPAREN,
|
801
|
+
value: '(',
|
802
|
+
line: 1,
|
803
|
+
column: 18,
|
803
804
|
)
|
804
805
|
expect(tokens[5]).to have_attributes(
|
805
|
-
:
|
806
|
-
:
|
807
|
-
:
|
808
|
-
:
|
806
|
+
type: :LBRACK,
|
807
|
+
value: '[',
|
808
|
+
line: 1,
|
809
|
+
column: 19,
|
809
810
|
)
|
810
811
|
expect(tokens[6]).to have_attributes(
|
811
|
-
:
|
812
|
-
:
|
813
|
-
:
|
814
|
-
:
|
812
|
+
type: :VARIABLE,
|
813
|
+
value: 'value',
|
814
|
+
line: 1,
|
815
|
+
column: 20,
|
815
816
|
)
|
816
817
|
expect(tokens[7]).to have_attributes(
|
817
|
-
:
|
818
|
-
:
|
819
|
-
:
|
820
|
-
:
|
818
|
+
type: :RBRACK,
|
819
|
+
value: ']',
|
820
|
+
line: 1,
|
821
|
+
column: 26,
|
821
822
|
)
|
822
823
|
expect(tokens[8]).to have_attributes(
|
823
|
-
:
|
824
|
-
:
|
825
|
-
:
|
826
|
-
:
|
824
|
+
type: :RPAREN,
|
825
|
+
value: ')',
|
826
|
+
line: 1,
|
827
|
+
column: 27,
|
827
828
|
)
|
828
829
|
expect(tokens[9]).to have_attributes(
|
829
|
-
:
|
830
|
-
:
|
831
|
-
:
|
832
|
-
:
|
830
|
+
type: :DOT,
|
831
|
+
value: '.',
|
832
|
+
line: 1,
|
833
|
+
column: 28,
|
833
834
|
)
|
834
835
|
expect(tokens[10]).to have_attributes(
|
835
|
-
:
|
836
|
-
:
|
837
|
-
:
|
838
|
-
:
|
836
|
+
type: :FUNCTION_NAME,
|
837
|
+
value: 'join',
|
838
|
+
line: 1,
|
839
|
+
column: 29,
|
839
840
|
)
|
840
841
|
expect(tokens[11]).to have_attributes(
|
841
|
-
:
|
842
|
-
:
|
843
|
-
:
|
844
|
-
:
|
842
|
+
type: :LPAREN,
|
843
|
+
value: '(',
|
844
|
+
line: 1,
|
845
|
+
column: 33,
|
845
846
|
)
|
846
847
|
expect(tokens[12]).to have_attributes(
|
847
|
-
:
|
848
|
-
:
|
849
|
-
:
|
850
|
-
:
|
848
|
+
type: :STRING,
|
849
|
+
value: '\nkey ',
|
850
|
+
line: 1,
|
851
|
+
column: 34,
|
851
852
|
)
|
852
853
|
expect(tokens[13]).to have_attributes(
|
853
|
-
:
|
854
|
-
:
|
855
|
-
:
|
856
|
-
:
|
854
|
+
type: :RPAREN,
|
855
|
+
value: ')',
|
856
|
+
line: 1,
|
857
|
+
column: 42,
|
857
858
|
)
|
858
859
|
expect(tokens[14]).to have_attributes(
|
859
|
-
:
|
860
|
-
:
|
861
|
-
:
|
862
|
-
:
|
860
|
+
type: :DQPOST,
|
861
|
+
value: '',
|
862
|
+
line: 1,
|
863
|
+
column: 43,
|
863
864
|
)
|
864
865
|
end
|
865
866
|
|
@@ -883,94 +884,94 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
883
884
|
expect(tokens).to have(15).tokens
|
884
885
|
|
885
886
|
expect(tokens[0]).to have_attributes(
|
886
|
-
:
|
887
|
-
:
|
888
|
-
:
|
889
|
-
:
|
887
|
+
type: :DQPRE,
|
888
|
+
value: '',
|
889
|
+
line: 1,
|
890
|
+
column: 1,
|
890
891
|
)
|
891
892
|
expect(tokens[1]).to have_attributes(
|
892
|
-
:
|
893
|
-
:
|
894
|
-
:
|
895
|
-
:
|
893
|
+
type: :VARIABLE,
|
894
|
+
value: 'facts',
|
895
|
+
line: 1,
|
896
|
+
column: 4,
|
896
897
|
)
|
897
898
|
expect(tokens[2]).to have_attributes(
|
898
|
-
:
|
899
|
-
:
|
900
|
-
:
|
901
|
-
:
|
899
|
+
type: :LBRACK,
|
900
|
+
value: '[',
|
901
|
+
line: 1,
|
902
|
+
column: 9,
|
902
903
|
)
|
903
904
|
expect(tokens[3]).to have_attributes(
|
904
|
-
:
|
905
|
-
:
|
906
|
-
:
|
907
|
-
:
|
905
|
+
type: :DQPRE,
|
906
|
+
value: 'network_',
|
907
|
+
line: 1,
|
908
|
+
column: 10,
|
908
909
|
)
|
909
910
|
expect(tokens[4]).to have_attributes(
|
910
|
-
:
|
911
|
-
:
|
912
|
-
:
|
913
|
-
:
|
911
|
+
type: :VARIABLE,
|
912
|
+
value: 'iface',
|
913
|
+
line: 1,
|
914
|
+
column: 21,
|
914
915
|
)
|
915
916
|
expect(tokens[5]).to have_attributes(
|
916
|
-
:
|
917
|
-
:
|
918
|
-
:
|
919
|
-
:
|
917
|
+
type: :DQPOST,
|
918
|
+
value: '',
|
919
|
+
line: 1,
|
920
|
+
column: 26,
|
920
921
|
)
|
921
922
|
expect(tokens[6]).to have_attributes(
|
922
|
-
:
|
923
|
-
:
|
924
|
-
:
|
925
|
-
:
|
923
|
+
type: :RBRACK,
|
924
|
+
value: ']',
|
925
|
+
line: 1,
|
926
|
+
column: 28,
|
926
927
|
)
|
927
928
|
expect(tokens[7]).to have_attributes(
|
928
|
-
:
|
929
|
-
:
|
930
|
-
:
|
931
|
-
:
|
929
|
+
type: :DQMID,
|
930
|
+
value: '/',
|
931
|
+
line: 1,
|
932
|
+
column: 29,
|
932
933
|
)
|
933
934
|
expect(tokens[8]).to have_attributes(
|
934
|
-
:
|
935
|
-
:
|
936
|
-
:
|
937
|
-
:
|
935
|
+
type: :VARIABLE,
|
936
|
+
value: 'facts',
|
937
|
+
line: 1,
|
938
|
+
column: 33,
|
938
939
|
)
|
939
940
|
expect(tokens[9]).to have_attributes(
|
940
|
-
:
|
941
|
-
:
|
942
|
-
:
|
943
|
-
:
|
941
|
+
type: :LBRACK,
|
942
|
+
value: '[',
|
943
|
+
line: 1,
|
944
|
+
column: 38,
|
944
945
|
)
|
945
946
|
expect(tokens[10]).to have_attributes(
|
946
|
-
:
|
947
|
-
:
|
948
|
-
:
|
949
|
-
:
|
947
|
+
type: :DQPRE,
|
948
|
+
value: 'netmask_',
|
949
|
+
line: 1,
|
950
|
+
column: 39,
|
950
951
|
)
|
951
952
|
expect(tokens[11]).to have_attributes(
|
952
|
-
:
|
953
|
-
:
|
954
|
-
:
|
955
|
-
:
|
953
|
+
type: :VARIABLE,
|
954
|
+
value: 'iface',
|
955
|
+
line: 1,
|
956
|
+
column: 50,
|
956
957
|
)
|
957
958
|
expect(tokens[12]).to have_attributes(
|
958
|
-
:
|
959
|
-
:
|
960
|
-
:
|
961
|
-
:
|
959
|
+
type: :DQPOST,
|
960
|
+
value: '',
|
961
|
+
line: 1,
|
962
|
+
column: 55,
|
962
963
|
)
|
963
964
|
expect(tokens[13]).to have_attributes(
|
964
|
-
:
|
965
|
-
:
|
966
|
-
:
|
967
|
-
:
|
965
|
+
type: :RBRACK,
|
966
|
+
value: ']',
|
967
|
+
line: 1,
|
968
|
+
column: 57,
|
968
969
|
)
|
969
970
|
expect(tokens[14]).to have_attributes(
|
970
|
-
:
|
971
|
-
:
|
972
|
-
:
|
973
|
-
:
|
971
|
+
type: :DQPOST,
|
972
|
+
value: '',
|
973
|
+
line: 1,
|
974
|
+
column: 58,
|
974
975
|
)
|
975
976
|
end
|
976
977
|
|
@@ -992,112 +993,112 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
992
993
|
expect(tokens).to have(18).tokens
|
993
994
|
|
994
995
|
expect(tokens[0]).to have_attributes(
|
995
|
-
:
|
996
|
-
:
|
997
|
-
:
|
998
|
-
:
|
996
|
+
type: :DQPRE,
|
997
|
+
value: '',
|
998
|
+
line: 1,
|
999
|
+
column: 1,
|
999
1000
|
)
|
1000
1001
|
expect(tokens[1]).to have_attributes(
|
1001
|
-
:
|
1002
|
-
:
|
1003
|
-
:
|
1004
|
-
:
|
1002
|
+
type: :VARIABLE,
|
1003
|
+
value: 'foo',
|
1004
|
+
line: 1,
|
1005
|
+
column: 4,
|
1005
1006
|
)
|
1006
1007
|
expect(tokens[2]).to have_attributes(
|
1007
|
-
:
|
1008
|
-
:
|
1009
|
-
:
|
1010
|
-
:
|
1008
|
+
type: :DOT,
|
1009
|
+
value: '.',
|
1010
|
+
line: 1,
|
1011
|
+
column: 8,
|
1011
1012
|
)
|
1012
1013
|
expect(tokens[3]).to have_attributes(
|
1013
|
-
:
|
1014
|
-
:
|
1015
|
-
:
|
1016
|
-
:
|
1014
|
+
type: :NAME,
|
1015
|
+
value: 'map',
|
1016
|
+
line: 1,
|
1017
|
+
column: 9,
|
1017
1018
|
)
|
1018
1019
|
expect(tokens[4]).to have_attributes(
|
1019
|
-
:
|
1020
|
-
:
|
1021
|
-
:
|
1022
|
-
:
|
1020
|
+
type: :WHITESPACE,
|
1021
|
+
value: ' ',
|
1022
|
+
line: 1,
|
1023
|
+
column: 12,
|
1023
1024
|
)
|
1024
1025
|
expect(tokens[5]).to have_attributes(
|
1025
|
-
:
|
1026
|
-
:
|
1027
|
-
:
|
1028
|
-
:
|
1026
|
+
type: :PIPE,
|
1027
|
+
value: '|',
|
1028
|
+
line: 1,
|
1029
|
+
column: 13,
|
1029
1030
|
)
|
1030
1031
|
expect(tokens[6]).to have_attributes(
|
1031
|
-
:
|
1032
|
-
:
|
1033
|
-
:
|
1034
|
-
:
|
1032
|
+
type: :VARIABLE,
|
1033
|
+
value: 'bar',
|
1034
|
+
line: 1,
|
1035
|
+
column: 14,
|
1035
1036
|
)
|
1036
1037
|
expect(tokens[7]).to have_attributes(
|
1037
|
-
:
|
1038
|
-
:
|
1039
|
-
:
|
1040
|
-
:
|
1038
|
+
type: :PIPE,
|
1039
|
+
value: '|',
|
1040
|
+
line: 1,
|
1041
|
+
column: 18,
|
1041
1042
|
)
|
1042
1043
|
expect(tokens[8]).to have_attributes(
|
1043
|
-
:
|
1044
|
-
:
|
1045
|
-
:
|
1046
|
-
:
|
1044
|
+
type: :WHITESPACE,
|
1045
|
+
value: ' ',
|
1046
|
+
line: 1,
|
1047
|
+
column: 19,
|
1047
1048
|
)
|
1048
1049
|
expect(tokens[9]).to have_attributes(
|
1049
|
-
:
|
1050
|
-
:
|
1051
|
-
:
|
1052
|
-
:
|
1050
|
+
type: :LBRACE,
|
1051
|
+
value: '{',
|
1052
|
+
line: 1,
|
1053
|
+
column: 20,
|
1053
1054
|
)
|
1054
1055
|
expect(tokens[10]).to have_attributes(
|
1055
|
-
:
|
1056
|
-
:
|
1057
|
-
:
|
1058
|
-
:
|
1056
|
+
type: :WHITESPACE,
|
1057
|
+
value: ' ',
|
1058
|
+
line: 1,
|
1059
|
+
column: 21,
|
1059
1060
|
)
|
1060
1061
|
expect(tokens[11]).to have_attributes(
|
1061
|
-
:
|
1062
|
-
:
|
1063
|
-
:
|
1064
|
-
:
|
1062
|
+
type: :FUNCTION_NAME,
|
1063
|
+
value: 'something',
|
1064
|
+
line: 1,
|
1065
|
+
column: 22,
|
1065
1066
|
)
|
1066
1067
|
expect(tokens[12]).to have_attributes(
|
1067
|
-
:
|
1068
|
-
:
|
1069
|
-
:
|
1070
|
-
:
|
1068
|
+
type: :LPAREN,
|
1069
|
+
value: '(',
|
1070
|
+
line: 1,
|
1071
|
+
column: 31,
|
1071
1072
|
)
|
1072
1073
|
expect(tokens[13]).to have_attributes(
|
1073
|
-
:
|
1074
|
-
:
|
1075
|
-
:
|
1076
|
-
:
|
1074
|
+
type: :VARIABLE,
|
1075
|
+
value: 'bar',
|
1076
|
+
line: 1,
|
1077
|
+
column: 32,
|
1077
1078
|
)
|
1078
1079
|
expect(tokens[14]).to have_attributes(
|
1079
|
-
:
|
1080
|
-
:
|
1081
|
-
:
|
1082
|
-
:
|
1080
|
+
type: :RPAREN,
|
1081
|
+
value: ')',
|
1082
|
+
line: 1,
|
1083
|
+
column: 36,
|
1083
1084
|
)
|
1084
1085
|
expect(tokens[15]).to have_attributes(
|
1085
|
-
:
|
1086
|
-
:
|
1087
|
-
:
|
1088
|
-
:
|
1086
|
+
type: :WHITESPACE,
|
1087
|
+
value: ' ',
|
1088
|
+
line: 1,
|
1089
|
+
column: 37,
|
1089
1090
|
)
|
1090
1091
|
expect(tokens[16]).to have_attributes(
|
1091
|
-
:
|
1092
|
-
:
|
1093
|
-
:
|
1094
|
-
:
|
1092
|
+
type: :RBRACE,
|
1093
|
+
value: '}',
|
1094
|
+
line: 1,
|
1095
|
+
column: 38,
|
1095
1096
|
)
|
1096
1097
|
expect(tokens[17]).to have_attributes(
|
1097
|
-
:
|
1098
|
-
:
|
1099
|
-
:
|
1100
|
-
:
|
1098
|
+
type: :DQPOST,
|
1099
|
+
value: '',
|
1100
|
+
line: 1,
|
1101
|
+
column: 39,
|
1101
1102
|
)
|
1102
1103
|
end
|
1103
1104
|
|
@@ -1108,7 +1109,7 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
1108
1109
|
end
|
1109
1110
|
|
1110
1111
|
context ':STRING / :DQ' do
|
1111
|
-
it '
|
1112
|
+
it 'handles a string with newline characters' do
|
1112
1113
|
# rubocop:disable Layout/TrailingWhitespace
|
1113
1114
|
manifest = <<END
|
1114
1115
|
exec {
|
@@ -1121,7 +1122,7 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
|
|
1121
1122
|
}
|
1122
1123
|
END
|
1123
1124
|
# rubocop:enable Layout/TrailingWhitespace
|
1124
|
-
tokens =
|
1125
|
+
tokens = lexer.tokenise(manifest)
|
1125
1126
|
|
1126
1127
|
expect(tokens.length).to eq(34)
|
1127
1128
|
|
@@ -1263,58 +1264,40 @@ END
|
|
1263
1264
|
expect(tokens[33].column).to eq(4)
|
1264
1265
|
end
|
1265
1266
|
|
1266
|
-
it '
|
1267
|
-
token =
|
1267
|
+
it 'calculates the column number correctly after an enclosed variable' do
|
1268
|
+
token = lexer.tokenise(' "${foo}" =>').last
|
1268
1269
|
expect(token.type).to eq(:FARROW)
|
1269
1270
|
expect(token.column).to eq(12)
|
1270
1271
|
end
|
1271
1272
|
|
1272
|
-
it '
|
1273
|
-
token =
|
1273
|
+
it 'calculates the column number correctly after an enclosed variable starting with a string' do
|
1274
|
+
token = lexer.tokenise(' "bar${foo}" =>').last
|
1274
1275
|
expect(token.type).to eq(:FARROW)
|
1275
1276
|
expect(token.column).to eq(15)
|
1276
1277
|
end
|
1277
1278
|
|
1278
|
-
it '
|
1279
|
-
token =
|
1279
|
+
it 'calculates the column number correctly after an enclosed variable ending with a string' do
|
1280
|
+
token = lexer.tokenise(' "${foo}bar" =>').last
|
1280
1281
|
expect(token.type).to eq(:FARROW)
|
1281
1282
|
expect(token.column).to eq(15)
|
1282
1283
|
end
|
1283
1284
|
|
1284
|
-
it '
|
1285
|
-
token =
|
1285
|
+
it 'calculates the column number correctly after an enclosed variable surround by a string' do
|
1286
|
+
token = lexer.tokenise(' "foo${bar}baz" =>').last
|
1286
1287
|
expect(token.type).to eq(:FARROW)
|
1287
1288
|
expect(token.column).to eq(18)
|
1288
1289
|
end
|
1289
1290
|
|
1290
|
-
it '
|
1291
|
+
it 'does not enclose variable with a chained function call' do
|
1291
1292
|
manifest = '"This is ${a.test}"'
|
1292
|
-
tokens =
|
1293
|
+
tokens = lexer.tokenise(manifest)
|
1293
1294
|
expect(tokens.map(&:to_manifest).join('')).to eq(manifest)
|
1294
1295
|
end
|
1295
1296
|
end
|
1296
1297
|
|
1297
|
-
|
1298
|
-
|
1299
|
-
|
1300
|
-
default
|
1301
|
-
define
|
1302
|
-
import
|
1303
|
-
if
|
1304
|
-
elsif
|
1305
|
-
else
|
1306
|
-
inherits
|
1307
|
-
node
|
1308
|
-
and
|
1309
|
-
or
|
1310
|
-
undef
|
1311
|
-
true
|
1312
|
-
false
|
1313
|
-
in
|
1314
|
-
unless
|
1315
|
-
].each do |keyword|
|
1316
|
-
it "should handle '#{keyword}' as a keyword" do
|
1317
|
-
token = @lexer.tokenise(keyword).first
|
1298
|
+
['case', 'class', 'default', 'define', 'import', 'if', 'elsif', 'else', 'inherits', 'node', 'and', 'or', 'undef', 'true', 'false', 'in', 'unless'].each do |keyword|
|
1299
|
+
it "handles '#{keyword}' as a keyword" do
|
1300
|
+
token = lexer.tokenise(keyword).first
|
1318
1301
|
expect(token.type).to eq(keyword.upcase.to_sym)
|
1319
1302
|
expect(token.value).to eq(keyword)
|
1320
1303
|
end
|
@@ -1367,40 +1350,40 @@ END
|
|
1367
1350
|
[:NEWLINE, "\n"],
|
1368
1351
|
[:NEWLINE, "\r\n"],
|
1369
1352
|
].each do |name, string|
|
1370
|
-
it "
|
1371
|
-
token =
|
1353
|
+
it "has a token named '#{name}'" do
|
1354
|
+
token = lexer.tokenise(string).first
|
1372
1355
|
expect(token.type).to eq(name)
|
1373
1356
|
expect(token.value).to eq(string)
|
1374
1357
|
end
|
1375
1358
|
end
|
1376
1359
|
|
1377
1360
|
context ':TYPE' do
|
1378
|
-
it '
|
1379
|
-
token =
|
1361
|
+
it 'matches Data Types' do
|
1362
|
+
token = lexer.tokenise('Integer').first
|
1380
1363
|
expect(token.type).to eq(:TYPE)
|
1381
1364
|
expect(token.value).to eq('Integer')
|
1382
1365
|
end
|
1383
1366
|
|
1384
|
-
it '
|
1385
|
-
token =
|
1367
|
+
it 'matches Catalog Types' do
|
1368
|
+
token = lexer.tokenise('Resource').first
|
1386
1369
|
expect(token.type).to eq(:TYPE)
|
1387
1370
|
expect(token.value).to eq('Resource')
|
1388
1371
|
end
|
1389
1372
|
|
1390
|
-
it '
|
1391
|
-
token =
|
1373
|
+
it 'matches Abstract Types' do
|
1374
|
+
token = lexer.tokenise('Collection').first
|
1392
1375
|
expect(token.type).to eq(:TYPE)
|
1393
1376
|
expect(token.value).to eq('Collection')
|
1394
1377
|
end
|
1395
1378
|
|
1396
1379
|
describe 'Platform Types' do
|
1397
|
-
it '
|
1398
|
-
token =
|
1380
|
+
it 'matches Callable' do
|
1381
|
+
token = lexer.tokenise('Callable').first
|
1399
1382
|
expect(token.type).to eq(:TYPE)
|
1400
1383
|
expect(token.value).to eq('Callable')
|
1401
1384
|
end
|
1402
|
-
it '
|
1403
|
-
token =
|
1385
|
+
it 'matches Sensitive' do
|
1386
|
+
token = lexer.tokenise('Sensitive').first
|
1404
1387
|
expect(token.type).to eq(:TYPE)
|
1405
1388
|
expect(token.value).to eq('Sensitive')
|
1406
1389
|
end
|
@@ -1408,7 +1391,7 @@ END
|
|
1408
1391
|
end
|
1409
1392
|
|
1410
1393
|
context ':HEREDOC without interpolation' do
|
1411
|
-
it '
|
1394
|
+
it 'parses a simple heredoc' do
|
1412
1395
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1413
1396
|
$str = @(myheredoc)
|
1414
1397
|
SOMETHING
|
@@ -1416,7 +1399,7 @@ END
|
|
1416
1399
|
:
|
1417
1400
|
|-myheredoc
|
1418
1401
|
END
|
1419
|
-
tokens =
|
1402
|
+
tokens = lexer.tokenise(manifest)
|
1420
1403
|
|
1421
1404
|
expect(tokens.length).to eq(8)
|
1422
1405
|
expect(tokens[0].type).to eq(:VARIABLE)
|
@@ -1453,7 +1436,7 @@ END
|
|
1453
1436
|
expect(tokens[7].column).to eq(14)
|
1454
1437
|
end
|
1455
1438
|
|
1456
|
-
it '
|
1439
|
+
it 'does not interpolate the contents of the heredoc' do
|
1457
1440
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1458
1441
|
$str = @(myheredoc)
|
1459
1442
|
SOMETHING
|
@@ -1461,7 +1444,7 @@ END
|
|
1461
1444
|
:
|
1462
1445
|
|-myheredoc
|
1463
1446
|
END
|
1464
|
-
tokens =
|
1447
|
+
tokens = lexer.tokenise(manifest)
|
1465
1448
|
|
1466
1449
|
expect(tokens.length).to eq(8)
|
1467
1450
|
expect(tokens[0].type).to eq(:VARIABLE)
|
@@ -1499,7 +1482,7 @@ END
|
|
1499
1482
|
expect(tokens[7].column).to eq(14)
|
1500
1483
|
end
|
1501
1484
|
|
1502
|
-
it '
|
1485
|
+
it 'handles multiple heredoc declarations on a single line' do
|
1503
1486
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1504
1487
|
$str = "${@(end1)} ${@(end2)}"
|
1505
1488
|
foo
|
@@ -1507,7 +1490,7 @@ END
|
|
1507
1490
|
bar
|
1508
1491
|
|-end2
|
1509
1492
|
END
|
1510
|
-
tokens =
|
1493
|
+
tokens = lexer.tokenise(manifest)
|
1511
1494
|
|
1512
1495
|
expect(tokens.length).to eq(14)
|
1513
1496
|
expect(tokens[0].type).to eq(:VARIABLE)
|
@@ -1570,7 +1553,7 @@ END
|
|
1570
1553
|
expect(tokens[13].column).to eq(9)
|
1571
1554
|
end
|
1572
1555
|
|
1573
|
-
it '
|
1556
|
+
it 'handles a heredoc that specifies a syntax' do
|
1574
1557
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1575
1558
|
$str = @("end":json/)
|
1576
1559
|
{
|
@@ -1579,7 +1562,7 @@ END
|
|
1579
1562
|
|-end
|
1580
1563
|
END
|
1581
1564
|
|
1582
|
-
tokens =
|
1565
|
+
tokens = lexer.tokenise(manifest)
|
1583
1566
|
|
1584
1567
|
expect(tokens.length).to eq(8)
|
1585
1568
|
expect(tokens[0].type).to eq(:VARIABLE)
|
@@ -1617,13 +1600,13 @@ END
|
|
1617
1600
|
expect(tokens[7].column).to eq(8)
|
1618
1601
|
end
|
1619
1602
|
|
1620
|
-
it '
|
1603
|
+
it 'handles a heredoc with spaces in the tag' do
|
1621
1604
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1622
1605
|
$str = @("myheredoc" /)
|
1623
1606
|
foo
|
1624
1607
|
|-myheredoc
|
1625
1608
|
END
|
1626
|
-
tokens =
|
1609
|
+
tokens = lexer.tokenise(manifest)
|
1627
1610
|
expect(tokens.length).to eq(8)
|
1628
1611
|
|
1629
1612
|
expect(tokens[4].type).to eq(:HEREDOC_OPEN)
|
@@ -1632,13 +1615,13 @@ END
|
|
1632
1615
|
expect(tokens[6].value).to eq(" foo\n ")
|
1633
1616
|
end
|
1634
1617
|
|
1635
|
-
it '
|
1618
|
+
it 'handles a heredoc with no indentation' do
|
1636
1619
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1637
1620
|
$str = @(EOT)
|
1638
1621
|
something
|
1639
1622
|
EOT
|
1640
1623
|
END
|
1641
|
-
tokens =
|
1624
|
+
tokens = lexer.tokenise(manifest)
|
1642
1625
|
|
1643
1626
|
expect(tokens.length).to eq(8)
|
1644
1627
|
expect(tokens[4].type).to eq(:HEREDOC_OPEN)
|
@@ -1649,7 +1632,7 @@ END
|
|
1649
1632
|
end
|
1650
1633
|
|
1651
1634
|
context ':HEREDOC with interpolation' do
|
1652
|
-
it '
|
1635
|
+
it 'parses a heredoc with no interpolated values as a :HEREDOC' do
|
1653
1636
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1654
1637
|
$str = @("myheredoc"/)
|
1655
1638
|
SOMETHING
|
@@ -1657,7 +1640,7 @@ END
|
|
1657
1640
|
:
|
1658
1641
|
|-myheredoc
|
1659
1642
|
END
|
1660
|
-
tokens =
|
1643
|
+
tokens = lexer.tokenise(manifest)
|
1661
1644
|
|
1662
1645
|
expect(tokens[0].type).to eq(:VARIABLE)
|
1663
1646
|
expect(tokens[0].value).to eq('str')
|
@@ -1694,7 +1677,7 @@ END
|
|
1694
1677
|
expect(tokens[7].column).to eq(14)
|
1695
1678
|
end
|
1696
1679
|
|
1697
|
-
it '
|
1680
|
+
it 'parses a heredoc with interpolated values' do
|
1698
1681
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1699
1682
|
$str = @("myheredoc"/)
|
1700
1683
|
SOMETHING
|
@@ -1705,7 +1688,7 @@ END
|
|
1705
1688
|
| myheredoc
|
1706
1689
|
END
|
1707
1690
|
|
1708
|
-
tokens =
|
1691
|
+
tokens = lexer.tokenise(manifest)
|
1709
1692
|
expect(tokens.map(&:to_manifest).join('')).to eq(manifest)
|
1710
1693
|
|
1711
1694
|
expect(tokens[0].type).to eq(:VARIABLE)
|
@@ -1758,13 +1741,13 @@ END
|
|
1758
1741
|
expect(tokens[10].column).to eq(11)
|
1759
1742
|
end
|
1760
1743
|
|
1761
|
-
it '
|
1744
|
+
it 'does not remove the unnecessary $ from enclosed variables' do
|
1762
1745
|
manifest = <<-END.gsub(%r{^ {6}}, '')
|
1763
1746
|
$str = @("myheredoc"/)
|
1764
1747
|
${$myvar}
|
1765
1748
|
|-myheredoc
|
1766
1749
|
END
|
1767
|
-
tokens =
|
1750
|
+
tokens = lexer.tokenise(manifest)
|
1768
1751
|
|
1769
1752
|
expect(tokens.length).to eq(10)
|
1770
1753
|
|
@@ -1778,284 +1761,283 @@ END
|
|
1778
1761
|
end
|
1779
1762
|
|
1780
1763
|
context ':CLASSREF' do
|
1781
|
-
it '
|
1782
|
-
token =
|
1764
|
+
it 'matches single capitalised alphanumeric term' do
|
1765
|
+
token = lexer.tokenise('One').first
|
1783
1766
|
expect(token.type).to eq(:CLASSREF)
|
1784
1767
|
expect(token.value).to eq('One')
|
1785
1768
|
end
|
1786
1769
|
|
1787
|
-
it '
|
1788
|
-
token =
|
1770
|
+
it 'matches two capitalised alphanumeric terms sep by ::' do
|
1771
|
+
token = lexer.tokenise('One::Two').first
|
1789
1772
|
expect(token.type).to eq(:CLASSREF)
|
1790
1773
|
expect(token.value).to eq('One::Two')
|
1791
1774
|
end
|
1792
1775
|
|
1793
|
-
it '
|
1794
|
-
token =
|
1776
|
+
it 'matches many capitalised alphanumeric terms sep by ::' do
|
1777
|
+
token = lexer.tokenise('One::Two::Three::Four::Five').first
|
1795
1778
|
expect(token.type).to eq(:CLASSREF)
|
1796
1779
|
expect(token.value).to eq('One::Two::Three::Four::Five')
|
1797
1780
|
end
|
1798
1781
|
|
1799
|
-
it '
|
1800
|
-
token =
|
1782
|
+
it 'matches capitalised terms prefixed by ::' do
|
1783
|
+
token = lexer.tokenise('::One').first
|
1801
1784
|
expect(token.type).to eq(:CLASSREF)
|
1802
1785
|
expect(token.value).to eq('::One')
|
1803
1786
|
end
|
1804
1787
|
|
1805
|
-
it '
|
1806
|
-
token =
|
1788
|
+
it 'matches terms that start with Types' do
|
1789
|
+
token = lexer.tokenise('Regexp_foo').first
|
1807
1790
|
expect(token.type).to eq(:CLASSREF)
|
1808
1791
|
expect(token.value).to eq('Regexp_foo')
|
1809
1792
|
end
|
1810
1793
|
end
|
1811
1794
|
|
1812
1795
|
context ':NAME' do
|
1813
|
-
it '
|
1814
|
-
token =
|
1796
|
+
it 'matches lowercase alphanumeric terms' do
|
1797
|
+
token = lexer.tokenise('one-two').first
|
1815
1798
|
expect(token.type).to eq(:NAME)
|
1816
1799
|
expect(token.value).to eq('one-two')
|
1817
1800
|
end
|
1818
1801
|
|
1819
|
-
it '
|
1820
|
-
token =
|
1802
|
+
it 'matches lowercase alphanumeric terms sep by ::' do
|
1803
|
+
token = lexer.tokenise('one::two').first
|
1821
1804
|
expect(token.type).to eq(:NAME)
|
1822
1805
|
expect(token.value).to eq('one::two')
|
1823
1806
|
end
|
1824
1807
|
|
1825
|
-
it '
|
1826
|
-
token =
|
1808
|
+
it 'matches many lowercase alphanumeric terms sep by ::' do
|
1809
|
+
token = lexer.tokenise('one::two::three::four::five').first
|
1827
1810
|
expect(token.type).to eq(:NAME)
|
1828
1811
|
expect(token.value).to eq('one::two::three::four::five')
|
1829
1812
|
end
|
1830
1813
|
|
1831
|
-
it '
|
1832
|
-
token =
|
1814
|
+
it 'matches lowercase alphanumeric terms prefixed by ::' do
|
1815
|
+
token = lexer.tokenise('::1one::2two::3three').first
|
1833
1816
|
expect(token.type).to eq(:NAME)
|
1834
1817
|
expect(token.value).to eq('::1one::2two::3three')
|
1835
1818
|
end
|
1836
1819
|
|
1837
|
-
it '
|
1838
|
-
token =
|
1820
|
+
it 'matches barewords beginning with an underscore' do
|
1821
|
+
token = lexer.tokenise('_bareword').first
|
1839
1822
|
expect(token.type).to eq(:NAME)
|
1840
1823
|
expect(token.value).to eq('_bareword')
|
1841
1824
|
end
|
1842
1825
|
end
|
1843
1826
|
|
1844
1827
|
context ':FUNCTION_NAME' do
|
1845
|
-
it '
|
1846
|
-
token =
|
1828
|
+
it 'matches when a :NAME is followed by a :LPAREN' do
|
1829
|
+
token = lexer.tokenise('my_function(').first
|
1847
1830
|
expect(token.type).to eq(:FUNCTION_NAME)
|
1848
1831
|
expect(token.value).to eq('my_function')
|
1849
1832
|
end
|
1850
1833
|
end
|
1851
1834
|
|
1852
1835
|
context ':NUMBER' do
|
1853
|
-
it '
|
1854
|
-
token =
|
1836
|
+
it 'matches numeric terms' do
|
1837
|
+
token = lexer.tokenise('1234567890').first
|
1855
1838
|
expect(token.type).to eq(:NUMBER)
|
1856
1839
|
expect(token.value).to eq('1234567890')
|
1857
1840
|
end
|
1858
1841
|
|
1859
|
-
it '
|
1860
|
-
token =
|
1842
|
+
it 'matches float terms' do
|
1843
|
+
token = lexer.tokenise('12345.6789').first
|
1861
1844
|
expect(token.type).to eq(:NUMBER)
|
1862
1845
|
expect(token.value).to eq('12345.6789')
|
1863
1846
|
end
|
1864
1847
|
|
1865
|
-
it '
|
1866
|
-
token =
|
1848
|
+
it 'matches hexadecimal terms' do
|
1849
|
+
token = lexer.tokenise('0xCAFE1029').first
|
1867
1850
|
expect(token.type).to eq(:NUMBER)
|
1868
1851
|
expect(token.value).to eq('0xCAFE1029')
|
1869
1852
|
end
|
1870
1853
|
|
1871
|
-
|
1872
|
-
|
1873
|
-
|
1874
|
-
|
1854
|
+
[
|
1855
|
+
'10e23',
|
1856
|
+
'1.234e5',
|
1857
|
+
].each do |f|
|
1858
|
+
it 'matches float with exponent terms' do
|
1859
|
+
token = lexer.tokenise(f).first
|
1860
|
+
expect(token.type).to eq(:NUMBER)
|
1861
|
+
expect(token.value).to eq(f)
|
1862
|
+
end
|
1875
1863
|
end
|
1876
1864
|
|
1877
|
-
it '
|
1878
|
-
token =
|
1865
|
+
it 'matches float with negative exponent terms' do
|
1866
|
+
token = lexer.tokenise('10e-23').first
|
1879
1867
|
expect(token.type).to eq(:NUMBER)
|
1880
1868
|
expect(token.value).to eq('10e-23')
|
1881
1869
|
end
|
1882
|
-
|
1883
|
-
it 'should match float with exponent terms' do
|
1884
|
-
token = @lexer.tokenise('1.234e5').first
|
1885
|
-
expect(token.type).to eq(:NUMBER)
|
1886
|
-
expect(token.value).to eq('1.234e5')
|
1887
|
-
end
|
1888
1870
|
end
|
1889
1871
|
|
1890
1872
|
context ':COMMENT' do
|
1891
|
-
it '
|
1892
|
-
token =
|
1873
|
+
it 'matches everything on a line after #' do
|
1874
|
+
token = lexer.tokenise('foo # bar baz')[2]
|
1893
1875
|
expect(token.type).to eq(:COMMENT)
|
1894
1876
|
expect(token.value).to eq(' bar baz')
|
1895
1877
|
end
|
1896
1878
|
|
1897
|
-
it '
|
1898
|
-
tokens =
|
1879
|
+
it 'does not include DOS line endings in the comment value' do
|
1880
|
+
tokens = lexer.tokenise("foo # bar baz\r\n")
|
1899
1881
|
|
1900
|
-
expect(tokens[2]).to have_attributes(:
|
1901
|
-
expect(tokens[3]).to have_attributes(:
|
1882
|
+
expect(tokens[2]).to have_attributes(type: :COMMENT, value: ' bar baz')
|
1883
|
+
expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\r\n")
|
1902
1884
|
end
|
1903
1885
|
|
1904
|
-
it '
|
1905
|
-
tokens =
|
1886
|
+
it 'does not include Unix line endings in the comment value' do
|
1887
|
+
tokens = lexer.tokenise("foo # bar baz\n")
|
1906
1888
|
|
1907
|
-
expect(tokens[2]).to have_attributes(:
|
1908
|
-
expect(tokens[3]).to have_attributes(:
|
1889
|
+
expect(tokens[2]).to have_attributes(type: :COMMENT, value: ' bar baz')
|
1890
|
+
expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\n")
|
1909
1891
|
end
|
1910
1892
|
end
|
1911
1893
|
|
 context ':MLCOMMENT' do
-it '
-token =
+it 'matches comments on a single line' do
+token = lexer.tokenise('/* foo bar */').first
 expect(token.type).to eq(:MLCOMMENT)
 expect(token.value).to eq('foo bar')
 end

-it '
-token =
+it 'matches comments on multiple lines' do
+token = lexer.tokenise("/* foo\n * bar\n*/").first
 expect(token.type).to eq(:MLCOMMENT)
 expect(token.value).to eq("foo\n bar\n")
 end
 end

 context ':SLASH_COMMENT' do
-it '
-token =
+it 'matches everyone on a line after //' do
+token = lexer.tokenise('foo // bar baz')[2]
 expect(token.type).to eq(:SLASH_COMMENT)
 expect(token.value).to eq(' bar baz')
 end

-it '
-tokens =
+it 'does not include DOS line endings in the comment value' do
+tokens = lexer.tokenise("foo // bar baz\r\n")

-expect(tokens[2]).to have_attributes(:
-expect(tokens[3]).to have_attributes(:
+expect(tokens[2]).to have_attributes(type: :SLASH_COMMENT, value: ' bar baz')
+expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\r\n")
 end

-it '
-tokens =
+it 'does not include Unix line endings in the comment value' do
+tokens = lexer.tokenise("foo // bar baz\n")

-expect(tokens[2]).to have_attributes(:
-expect(tokens[3]).to have_attributes(:
+expect(tokens[2]).to have_attributes(type: :SLASH_COMMENT, value: ' bar baz')
+expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\n")
 end
 end

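The whole spec diff follows one pattern: example descriptions drop the leading "should" in favour of present tense, the @lexer instance variable becomes a lexer helper, and have_attributes switches to Ruby 1.9 hash syntax. Both snippets below are taken verbatim from this diff, shown side by side for clarity:

    # Removed style (from the :NUMBER context above):
    it 'should match float with exponent terms' do
      token = @lexer.tokenise('1.234e5').first
      expect(token.type).to eq(:NUMBER)
      expect(token.value).to eq('1.234e5')
    end

    # Added style (from the :COMMENT context above):
    it 'does not include Unix line endings in the comment value' do
      tokens = lexer.tokenise("foo # bar baz\n")

      expect(tokens[2]).to have_attributes(type: :COMMENT, value: ' bar baz')
      expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\n")
    end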
 context ':SSTRING' do
-it '
-token =
+it 'matches a single quoted string' do
+token = lexer.tokenise("'single quoted string'").first
 expect(token.type).to eq(:SSTRING)
 expect(token.value).to eq('single quoted string')
 end

-it "
-token =
+it "matches a single quoted string with an escaped '" do
+token = lexer.tokenise(%q('single quoted string with "\\'"')).first
 expect(token.type).to eq(:SSTRING)
 expect(token.value).to eq('single quoted string with "\\\'"')
 end

-it '
-token =
+it 'matches a single quoted string with an escaped $' do
+token = lexer.tokenise(%q('single quoted string with "\$"')).first
 expect(token.type).to eq(:SSTRING)
 expect(token.value).to eq('single quoted string with "\\$"')
 end

-it '
-token =
+it 'matches a single quoted string with an escaped .' do
+token = lexer.tokenise(%q('single quoted string with "\."')).first
 expect(token.type).to eq(:SSTRING)
 expect(token.value).to eq('single quoted string with "\\."')
 end

-it '
-token =
+it 'matches a single quoted string with an escaped \n' do
+token = lexer.tokenise(%q('single quoted string with "\n"')).first
 expect(token.type).to eq(:SSTRING)
 expect(token.value).to eq('single quoted string with "\\n"')
 end

-it '
-
-
-
-end
-
-it '
-token =
+# it 'matches a single quoted string with an escaped \' do
+# token = lexer.tokenise(%q('single quoted string with "\\\\"')).first
+# expect(token.type).to eq(:SSTRING)
+# expect(token.value).to eq('single quoted string with "\\\\"')
+# end
+#
+it 'matches an empty string' do
+token = lexer.tokenise("''").first
 expect(token.type).to eq(:SSTRING)
 expect(token.value).to eq('')
 end

-it '
-token =
+it 'matches an empty string ending with \\' do
+token = lexer.tokenise("'foo\\\\'").first
 expect(token.type).to eq(:SSTRING)
 expect(token.value).to eq(%(foo\\\\))
 end

-it '
-token =
+it 'matches single quoted string containing a line break' do
+token = lexer.tokenise("'\n'").first
 expect(token.type).to eq(:SSTRING)
 expect(token.value).to eq("\n")
 end
 end

 context ':REGEX' do
-it '
-token =
+it 'matches anything enclosed in //' do
+token = lexer.tokenise('/this is a regex/').first
 expect(token.type).to eq(:REGEX)
 expect(token.value).to eq('this is a regex')
 end

-it '
-token =
+it 'matches even if there is \n in the regex' do
+token = lexer.tokenise("/this is a regex,\ntoo/").first
 expect(token.type).to eq(:REGEX)
 expect(token.value).to eq("this is a regex,\ntoo")
 end

-it '
-token =
+it 'does not consider \/ to be the end of the regex' do
+token = lexer.tokenise('/this is \/ a regex/').first
 expect(token.type).to eq(:REGEX)
 expect(token.value).to eq('this is \\/ a regex')
 end

-it '
-tokens =
+it 'is allowed as a param to a data type' do
+tokens = lexer.tokenise('Foo[/bar/]')
 expect(tokens[2].type).to eq(:REGEX)
 expect(tokens[2].value).to eq('bar')
 end

-it '
-tokens =
+it 'is allowed as a param to an optional data type' do
+tokens = lexer.tokenise('Optional[Regexp[/^puppet/]]')
 expect(tokens[4].type).to eq(:REGEX)
 expect(tokens[4].value).to eq('^puppet')
 end

-it '
-tokens =
+it 'does not match chained division' do
+tokens = lexer.tokenise('$x = $a/$b/$c')
 expect(tokens.select { |r| r.type == :REGEX }).to be_empty
 end

-it '
-tokens =
+it 'properlies parse when regex follows an if' do
+tokens = lexer.tokenise('if /^icinga_service_icon_.*/ in $location_info { }')
 expect(tokens[2].type).to eq(:REGEX)
 end

-it '
-tokens =
+it 'properlies parse when a regex follows an elsif' do
+tokens = lexer.tokenise('if /a/ in $location_info { } elsif /b/ in $location_info { }')
 expect(tokens[2].type).to eq(:REGEX)
 expect(tokens[14].type).to eq(:REGEX)
 end

-it '
-tokens =
+it 'properlies parse when a regex is provided as a function argument' do
+tokens = lexer.tokenise('$somevar = $other_var.match(/([\w\.]+(:\d+)?(\/\w+)?)(:(\w+))?/)')
 expect(tokens[8].type).to eq(:REGEX)
 expect(tokens[8].value).to eq('([\w\.]+(:\d+)?(\/\w+)?)(:(\w+))?')
 end

-it '
-tokens =
+it 'discriminates between division and regexes' do
+tokens = lexer.tokenise('if $a/10==0 or $b=~/{}/')
 expect(tokens[3].type).to eq(:DIV)
 expect(tokens[12].type).to eq(:REGEX)
 expect(tokens[12].value).to eq('{}')
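The division/regex disambiguation covered above is easy to check interactively. A minimal sketch, assuming the puppet-lint gem is installed (the index positions are the ones asserted in the specs):

    require 'puppet-lint'

    lexer = PuppetLint::Lexer.new

    # A '/' between values is tokenised as :DIV, while /.../ after =~ is a :REGEX.
    tokens = lexer.tokenise('if $a/10==0 or $b=~/{}/')
    tokens[3].type   # => :DIV
    tokens[12].type  # => :REGEX
    tokens[12].value # => '{}'

    # Chained '/' in an expression never produces a :REGEX token.
    lexer.tokenise('$x = $a/$b/$c').any? { |t| t.type == :REGEX } # => false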
@@ -2063,22 +2045,22 @@ END
 end

 context ':STRING' do
-it '
+it 'parses strings with embedded strings' do
 expect {
-
-}.
+lexer.tokenise('exec { "/bin/echo \"${environment}\"": }')
+}.not_to raise_error
 end

-it '
-token =
+it 'matches double quoted string containing a line break' do
+token = lexer.tokenise(%("\n")).first
 expect(token.type).to eq(:STRING)
 expect(token.value).to eq("\n")
 end

-it '
+it 'handles interpolated values that contain double quotes' do
 manifest = %{"export bar=\\"${join(hiera('test'), "," )}\\""}

-tokens =
+tokens = lexer.tokenise(manifest)
 expect(tokens[0].type).to eq(:DQPRE)
 expect(tokens[0].value).to eq('export bar=\"')
 expect(tokens[1].type).to eq(:FUNCTION_NAME)
@@ -2104,20 +2086,20 @@ END
 end

 context ':WHITESPACE' do
-it '
-token =
+it 'parses spaces' do
+token = lexer.tokenise(' ').first
 expect(token.type).to eq(:WHITESPACE)
 expect(token.value).to eq(' ')
 end

-it '
-token =
+it 'parses tabs' do
+token = lexer.tokenise("\t").first
 expect(token.type).to eq(:WHITESPACE)
 expect(token.value).to eq("\t")
 end

-it '
-token =
+it 'parses unicode spaces', unless: RUBY_VERSION == '1.8.7' do
+token = lexer.tokenise("\xc2\xa0").first
 expect(token.type).to eq(:WHITESPACE)
 expect(token.value).to eq("\xc2\xa0")
 end
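Two small points about the :WHITESPACE examples: "\xc2\xa0" is the UTF-8 byte sequence for U+00A0 (no-break space), and the `unless: RUBY_VERSION == '1.8.7'` metadata uses RSpec's built-in conditional exclusion filter to skip that example on Ruby 1.8.7. A quick illustration in plain Ruby, not specific to puppet-lint:

    "\xc2\xa0".bytes        # => [194, 160]
    "\xc2\xa0" == "\u00a0"  # => true, in a UTF-8 encoded source file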