puppet-lint 2.5.2 → 3.0.0

This diff shows the changes between the publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
Files changed (112)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +522 -0
  3. data/lib/puppet-lint/bin.rb +71 -6
  4. data/lib/puppet-lint/checkplugin.rb +43 -9
  5. data/lib/puppet-lint/checks.rb +16 -16
  6. data/lib/puppet-lint/configuration.rb +134 -134
  7. data/lib/puppet-lint/data.rb +28 -28
  8. data/lib/puppet-lint/lexer/string_slurper.rb +138 -140
  9. data/lib/puppet-lint/lexer/token.rb +188 -190
  10. data/lib/puppet-lint/lexer.rb +416 -417
  11. data/lib/puppet-lint/monkeypatches.rb +1 -1
  12. data/lib/puppet-lint/optparser.rb +5 -1
  13. data/lib/puppet-lint/plugins/check_classes/arrow_on_right_operand_line.rb +6 -4
  14. data/lib/puppet-lint/plugins/check_classes/autoloader_layout.rb +5 -3
  15. data/lib/puppet-lint/plugins/check_classes/class_inherits_from_params_class.rb +6 -4
  16. data/lib/puppet-lint/plugins/check_classes/code_on_top_scope.rb +5 -3
  17. data/lib/puppet-lint/plugins/check_classes/inherits_across_namespaces.rb +5 -3
  18. data/lib/puppet-lint/plugins/check_classes/names_containing_dash.rb +5 -3
  19. data/lib/puppet-lint/plugins/check_classes/names_containing_uppercase.rb +7 -5
  20. data/lib/puppet-lint/plugins/check_classes/nested_classes_or_defines.rb +5 -3
  21. data/lib/puppet-lint/plugins/check_classes/parameter_order.rb +7 -4
  22. data/lib/puppet-lint/plugins/check_classes/right_to_left_relationship.rb +5 -3
  23. data/lib/puppet-lint/plugins/check_classes/variable_scope.rb +15 -13
  24. data/lib/puppet-lint/plugins/check_comments/slash_comments.rb +9 -7
  25. data/lib/puppet-lint/plugins/check_comments/star_comments.rb +10 -8
  26. data/lib/puppet-lint/plugins/check_conditionals/case_without_default.rb +6 -4
  27. data/lib/puppet-lint/plugins/check_conditionals/selector_inside_resource.rb +5 -3
  28. data/lib/puppet-lint/plugins/check_documentation/documentation.rb +7 -3
  29. data/lib/puppet-lint/plugins/check_nodes/unquoted_node_name.rb +15 -11
  30. data/lib/puppet-lint/plugins/check_resources/duplicate_params.rb +5 -3
  31. data/lib/puppet-lint/plugins/check_resources/ensure_first_param.rb +8 -5
  32. data/lib/puppet-lint/plugins/check_resources/ensure_not_symlink_target.rb +11 -8
  33. data/lib/puppet-lint/plugins/check_resources/file_mode.rb +14 -9
  34. data/lib/puppet-lint/plugins/check_resources/unquoted_file_mode.rb +11 -6
  35. data/lib/puppet-lint/plugins/check_resources/unquoted_resource_title.rb +6 -4
  36. data/lib/puppet-lint/plugins/check_strings/double_quoted_strings.rb +12 -7
  37. data/lib/puppet-lint/plugins/check_strings/only_variable_string.rb +8 -6
  38. data/lib/puppet-lint/plugins/check_strings/puppet_url_without_modules.rb +14 -8
  39. data/lib/puppet-lint/plugins/check_strings/quoted_booleans.rb +11 -7
  40. data/lib/puppet-lint/plugins/check_strings/single_quote_string_with_variables.rb +11 -6
  41. data/lib/puppet-lint/plugins/check_strings/variables_not_enclosed.rb +12 -8
  42. data/lib/puppet-lint/plugins/check_variables/variable_contains_dash.rb +11 -7
  43. data/lib/puppet-lint/plugins/check_variables/variable_is_lowercase.rb +11 -7
  44. data/lib/puppet-lint/plugins/check_whitespace/140chars.rb +3 -8
  45. data/lib/puppet-lint/plugins/check_whitespace/2sp_soft_tabs.rb +10 -8
  46. data/lib/puppet-lint/plugins/check_whitespace/80chars.rb +3 -8
  47. data/lib/puppet-lint/plugins/check_whitespace/arrow_alignment.rb +10 -8
  48. data/lib/puppet-lint/plugins/check_whitespace/hard_tabs.rb +11 -7
  49. data/lib/puppet-lint/plugins/check_whitespace/line_length.rb +29 -0
  50. data/lib/puppet-lint/plugins/check_whitespace/trailing_whitespace.rb +13 -7
  51. data/lib/puppet-lint/plugins.rb +63 -61
  52. data/lib/puppet-lint/report/github.rb +17 -0
  53. data/lib/puppet-lint/report/sarif_template.json +63 -0
  54. data/lib/puppet-lint/tasks/puppet-lint.rb +84 -83
  55. data/lib/puppet-lint/tasks/release_test.rb +4 -1
  56. data/lib/puppet-lint/version.rb +1 -1
  57. data/lib/puppet-lint.rb +27 -12
  58. data/spec/acceptance/puppet_lint_spec.rb +46 -0
  59. data/spec/spec_helper.rb +92 -91
  60. data/spec/spec_helper_acceptance.rb +6 -0
  61. data/spec/spec_helper_acceptance_local.rb +38 -0
  62. data/spec/{puppet-lint → unit/puppet-lint}/bin_spec.rb +79 -35
  63. data/spec/{puppet-lint → unit/puppet-lint}/checks_spec.rb +36 -36
  64. data/spec/unit/puppet-lint/configuration_spec.rb +88 -0
  65. data/spec/{puppet-lint → unit/puppet-lint}/data_spec.rb +6 -3
  66. data/spec/{puppet-lint → unit/puppet-lint}/ignore_overrides_spec.rb +17 -17
  67. data/spec/{puppet-lint → unit/puppet-lint}/lexer/string_slurper_spec.rb +128 -128
  68. data/spec/{puppet-lint → unit/puppet-lint}/lexer/token_spec.rb +1 -1
  69. data/spec/{puppet-lint → unit/puppet-lint}/lexer_spec.rb +653 -671
  70. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/arrow_on_right_operand_line_spec.rb +16 -16
  71. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/autoloader_layout_spec.rb +13 -13
  72. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/class_inherits_from_params_class_spec.rb +3 -3
  73. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/code_on_top_scope_spec.rb +4 -4
  74. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/inherits_across_namespaces_spec.rb +4 -4
  75. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/name_contains_uppercase_spec.rb +10 -10
  76. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/names_containing_dash_spec.rb +7 -7
  77. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/nested_classes_or_defines_spec.rb +7 -7
  78. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/parameter_order_spec.rb +9 -9
  79. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/right_to_left_relationship_spec.rb +3 -3
  80. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_classes/variable_scope_spec.rb +25 -25
  81. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_comments/slash_comments_spec.rb +7 -7
  82. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_comments/star_comments_spec.rb +13 -13
  83. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_conditionals/case_without_default_spec.rb +10 -10
  84. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_conditionals/selector_inside_resource_spec.rb +3 -3
  85. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_documentation/documentation_spec.rb +8 -8
  86. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_nodes/unquoted_node_name_spec.rb +24 -24
  87. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/duplicate_params_spec.rb +9 -9
  88. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/ensure_first_param_spec.rb +19 -19
  89. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/ensure_not_symlink_target_spec.rb +10 -10
  90. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/file_mode_spec.rb +40 -40
  91. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/unquoted_file_mode_spec.rb +20 -20
  92. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_resources/unquoted_resource_title_spec.rb +24 -24
  93. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/double_quoted_strings_spec.rb +27 -27
  94. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/only_variable_string_spec.rb +18 -18
  95. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/puppet_url_without_modules_spec.rb +9 -9
  96. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/quoted_booleans_spec.rb +22 -22
  97. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/single_quote_string_with_variables_spec.rb +2 -2
  98. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_strings/variables_not_enclosed_spec.rb +21 -21
  99. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_variables/variable_contains_dash_spec.rb +6 -6
  100. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_variables/variable_is_lowercase_spec.rb +7 -7
  101. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/140chars_spec.rb +5 -5
  102. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/2sp_soft_tabs_spec.rb +2 -2
  103. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/80chars_spec.rb +6 -6
  104. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/arrow_alignment_spec.rb +127 -127
  105. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/hard_tabs_spec.rb +7 -7
  106. data/spec/{puppet-lint → unit/puppet-lint}/plugins/check_whitespace/trailing_whitespace_spec.rb +15 -15
  107. data/spec/unit/puppet-lint/puppet-lint_spec.rb +18 -0
  108. metadata +63 -119
  109. data/CHANGELOG.md +0 -33
  110. data/HISTORY.md +0 -1130
  111. data/spec/puppet-lint/configuration_spec.rb +0 -66
  112. data/spec/puppet-lint_spec.rb +0 -16
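
The bulk of this release is a RuboCop-driven modernisation of the spec suite; the diff excerpted below is from the lexer spec (file 69 above). The same pattern repeats throughout: instance variables assigned in before blocks are replaced with a named subject, example descriptions drop the "should ..." wording in favour of the present tense, and hash-rocket symbol keys become Ruby 1.9 hash syntax with trailing commas. A condensed before/after sketch of that pattern, assembled from the hunks below rather than quoted verbatim (it assumes spec_helper loads puppet-lint, as in the real suite):

# puppet-lint 2.5.2 spec style
require 'spec_helper'

describe PuppetLint::Lexer do
  before do
    @lexer = PuppetLint::Lexer.new
  end

  it 'should bork' do
    expect { @lexer.tokenise('^') }.to raise_error(PuppetLint::LexerError)
  end
end

# puppet-lint 3.0.0 spec style
require 'spec_helper'

describe PuppetLint::Lexer do
  subject(:lexer) { described_class.new }

  it 'borks' do
    expect { lexer.tokenise('^') }.to raise_error(PuppetLint::LexerError)
  end
end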
@@ -2,73 +2,74 @@

  require 'spec_helper'

- describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
- before do
- @lexer = PuppetLint::Lexer.new
+ describe PuppetLint::Lexer do
+ subject(:lexer) do
+ described_class.new
  end

  context 'invalid code' do
- it 'should bork' do
- expect { @lexer.tokenise('^') }.to raise_error(PuppetLint::LexerError)
+ it 'borks' do
+ expect { lexer.tokenise('^') }.to raise_error(PuppetLint::LexerError)
  end
  end

  context '#new_token' do
- it 'should calculate the line number for an empty string' do
- token = @lexer.new_token(:TEST, 'test')
+ it 'calculates the line number for an empty string' do
+ token = lexer.new_token(:TEST, 'test')
  expect(token.line).to eq(1)
  end

- it 'should get correct line number after double quoted multi line string' do
- @lexer.new_token(:STRING, "test\ntest")
- token = @lexer.new_token(:TEST, 'test')
+ it 'gets correct line number after double quoted multi line string' do
+ lexer.new_token(:STRING, "test\ntest")
+ token = lexer.new_token(:TEST, 'test')
  expect(token.line).to eq(2)
  end

- it 'should get correct line number after a multi line comment' do
- @lexer.new_token(:MLCOMMENT, "test\ntest", :raw => "/*test\ntest*/")
- token = @lexer.new_token(:TEST, 'test')
+ it 'gets correct line number after a multi line comment' do
+ lexer.new_token(:MLCOMMENT, "test\ntest", raw: "/*test\ntest*/")
+ token = lexer.new_token(:TEST, 'test')
  expect(token.line).to eq(2)
  end

- it 'should calculate the line number for a multi line string' do
- @lexer.new_token(:SSTRING, "test\ntest")
- token = @lexer.new_token(:TEST, 'test')
+ it 'calculates the line number for a multi line string' do
+ lexer.new_token(:SSTRING, "test\ntest")
+ token = lexer.new_token(:TEST, 'test')
  expect(token.line).to eq(2)
  end

- it 'should calculate line number for string that ends with newline' do
- @lexer.new_token(:SSTRING, "test\n")
- token = @lexer.new_token(:TEST, 'test')
+ it 'calculates line number for string that ends with newline' do
+ lexer.new_token(:SSTRING, "test\n")
+ token = lexer.new_token(:TEST, 'test')
  expect(token.line).to eq(2)
  end

- it 'should calculate the column number for an empty string' do
- token = @lexer.new_token(:TEST, 'test')
+ it 'calculates the column number for an empty string' do
+ token = lexer.new_token(:TEST, 'test')
  expect(token.column).to eq(1)
  end

- it 'should calculate the column number for a single line string' do
- @lexer.new_token(:SSTRING, 'this is a test')
- token = @lexer.new_token(:TEST, 'test')
+ it 'calculates the column number for a single line string' do
+ lexer.new_token(:SSTRING, 'this is a test')
+ token = lexer.new_token(:TEST, 'test')
  expect(token.column).to eq(17)
  end

- it 'should calculate the column number for a multi line string' do
- @lexer.instance_variable_set('@line_no', 4)
- @lexer.instance_variable_set('@column', 5)
- @lexer.new_token(:SSTRING, "test\ntest")
- token = @lexer.new_token(:TEST, 'test')
+ it 'calculates the column number for a multi line string' do
+ lexer.instance_variable_set('@line_no', 4)
+ lexer.instance_variable_set('@column', 5)
+ lexer.new_token(:SSTRING, "test\ntest")
+ token = lexer.new_token(:TEST, 'test')
  expect(token.column).to eq(6)
  end
  end

  context '#process_string_segments' do
- subject(:tokens) { @lexer.tokens }
- subject(:manifest) { @lexer.tokens.map(&:to_manifest).join }
+ subject(:tokens) { lexer.tokens }
+
+ subject(:manifest) { lexer.tokens.map(&:to_manifest).join }

  before(:each) do
- @lexer.process_string_segments(segments)
+ lexer.process_string_segments(segments)
  end

  context 'an empty string segment' do
@@ -81,10 +82,10 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
  it 'creates a :STRING token' do
  expect(tokens).to have(1).token
  expect(tokens[0]).to have_attributes(
- :type => :STRING,
- :value => '',
- :line => 1,
- :column => 1
+ type: :STRING,
+ value: '',
+ line: 1,
+ column: 1,
  )
  end

@@ -105,22 +106,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
  it 'creates a tokenised string with an interpolated variable' do
  expect(tokens).to have(3).tokens
  expect(tokens[0]).to have_attributes(
- :type => :DQPRE,
- :value => '',
- :line => 1,
- :column => 1
+ type: :DQPRE,
+ value: '',
+ line: 1,
+ column: 1,
  )
  expect(tokens[1]).to have_attributes(
- :type => :VARIABLE,
- :value => 'foo',
- :line => 1,
- :column => 4
+ type: :VARIABLE,
+ value: 'foo',
+ line: 1,
+ column: 4,
  )
  expect(tokens[2]).to have_attributes(
- :type => :DQPOST,
- :value => 'bar',
- :line => 1,
- :column => 7
+ type: :DQPOST,
+ value: 'bar',
+ line: 1,
+ column: 7,
  )
  end

@@ -141,22 +142,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
141
142
  it 'creates a tokenised string with an interpolated variable' do
142
143
  expect(tokens).to have(3).tokens
143
144
  expect(tokens[0]).to have_attributes(
144
- :type => :DQPRE,
145
- :value => 'foo',
146
- :line => 1,
147
- :column => 1
145
+ type: :DQPRE,
146
+ value: 'foo',
147
+ line: 1,
148
+ column: 1,
148
149
  )
149
150
  expect(tokens[1]).to have_attributes(
150
- :type => :VARIABLE,
151
- :value => 'bar',
152
- :line => 1,
153
- :column => 7
151
+ type: :VARIABLE,
152
+ value: 'bar',
153
+ line: 1,
154
+ column: 7,
154
155
  )
155
156
  expect(tokens[2]).to have_attributes(
156
- :type => :DQPOST,
157
- :value => 'baz',
158
- :line => 1,
159
- :column => 10
157
+ type: :DQPOST,
158
+ value: 'baz',
159
+ line: 1,
160
+ column: 10,
160
161
  )
161
162
  end
162
163
 
@@ -180,34 +181,34 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
180
181
  expect(tokens).to have(5).tokens
181
182
 
182
183
  expect(tokens[0]).to have_attributes(
183
- :type => :DQPRE,
184
- :value => 'foo',
185
- :line => 1,
186
- :column => 1
184
+ type: :DQPRE,
185
+ value: 'foo',
186
+ line: 1,
187
+ column: 1,
187
188
  )
188
189
  expect(tokens[1]).to have_attributes(
189
- :type => :VARIABLE,
190
- :value => 'bar',
191
- :line => 1,
192
- :column => 7
190
+ type: :VARIABLE,
191
+ value: 'bar',
192
+ line: 1,
193
+ column: 7,
193
194
  )
194
195
  expect(tokens[2]).to have_attributes(
195
- :type => :DQMID,
196
- :value => 'baz',
197
- :line => 1,
198
- :column => 10
196
+ type: :DQMID,
197
+ value: 'baz',
198
+ line: 1,
199
+ column: 10,
199
200
  )
200
201
  expect(tokens[3]).to have_attributes(
201
- :type => :VARIABLE,
202
- :value => 'gronk',
203
- :line => 1,
204
- :column => 16
202
+ type: :VARIABLE,
203
+ value: 'gronk',
204
+ line: 1,
205
+ column: 16,
205
206
  )
206
207
  expect(tokens[4]).to have_attributes(
207
- :type => :DQPOST,
208
- :value => 'meh',
209
- :line => 1,
210
- :column => 21
208
+ type: :DQPOST,
209
+ value: 'meh',
210
+ line: 1,
211
+ column: 21,
211
212
  )
212
213
  end
213
214
 
@@ -229,22 +230,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
229
230
  expect(tokens).to have(3).tokens
230
231
 
231
232
  expect(tokens[0]).to have_attributes(
232
- :type => :DQPRE,
233
- :value => '',
234
- :line => 1,
235
- :column => 1
233
+ type: :DQPRE,
234
+ value: '',
235
+ line: 1,
236
+ column: 1,
236
237
  )
237
238
  expect(tokens[1]).to have_attributes(
238
- :type => :VARIABLE,
239
- :value => 'foo',
240
- :line => 1,
241
- :column => 4
239
+ type: :VARIABLE,
240
+ value: 'foo',
241
+ line: 1,
242
+ column: 4,
242
243
  )
243
244
  expect(tokens[2]).to have_attributes(
244
- :type => :DQPOST,
245
- :value => '',
246
- :line => 1,
247
- :column => 7
245
+ type: :DQPOST,
246
+ value: '',
247
+ line: 1,
248
+ column: 7,
248
249
  )
249
250
  end
250
251
 
@@ -254,7 +255,7 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
  end

  context 'treats a variable named the same as the keyword as a variable' do
- PuppetLint::Lexer::KEYWORDS.keys.each do |keyword|
+ PuppetLint::Lexer::KEYWORDS.each_key do |keyword|
  context "for '#{keyword}'" do
  let(:segments) do
  [
@@ -268,22 +269,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
268
269
  expect(tokens).to have(3).tokens
269
270
 
270
271
  expect(tokens[0]).to have_attributes(
271
- :type => :DQPRE,
272
- :value => '',
273
- :line => 1,
274
- :column => 1
272
+ type: :DQPRE,
273
+ value: '',
274
+ line: 1,
275
+ column: 1,
275
276
  )
276
277
  expect(tokens[1]).to have_attributes(
277
- :type => :VARIABLE,
278
- :value => keyword,
279
- :line => 1,
280
- :column => 4
278
+ type: :VARIABLE,
279
+ value: keyword,
280
+ line: 1,
281
+ column: 4,
281
282
  )
282
283
  expect(tokens[2]).to have_attributes(
283
- :type => :DQPOST,
284
- :value => '',
285
- :line => 1,
286
- :column => keyword.size + 4
284
+ type: :DQPOST,
285
+ value: '',
286
+ line: 1,
287
+ column: keyword.size + 4,
287
288
  )
288
289
  end
289
290
 
@@ -307,22 +308,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
307
308
  expect(tokens).to have(3).tokens
308
309
 
309
310
  expect(tokens[0]).to have_attributes(
310
- :type => :DQPRE,
311
- :value => '',
312
- :line => 1,
313
- :column => 1
311
+ type: :DQPRE,
312
+ value: '',
313
+ line: 1,
314
+ column: 1,
314
315
  )
315
316
  expect(tokens[1]).to have_attributes(
316
- :type => :VARIABLE,
317
- :value => 'bar',
318
- :line => 1,
319
- :column => 4
317
+ type: :VARIABLE,
318
+ value: 'bar',
319
+ line: 1,
320
+ column: 4,
320
321
  )
321
322
  expect(tokens[2]).to have_attributes(
322
- :type => :DQPOST,
323
- :value => '',
324
- :line => 1,
325
- :column => 8
323
+ type: :DQPOST,
324
+ value: '',
325
+ line: 1,
326
+ column: 8,
326
327
  )
327
328
  end
328
329
 
@@ -344,58 +345,58 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
344
345
  expect(tokens).to have(9).tokens
345
346
 
346
347
  expect(tokens[0]).to have_attributes(
347
- :type => :DQPRE,
348
- :value => '',
349
- :line => 1,
350
- :column => 1
348
+ type: :DQPRE,
349
+ value: '',
350
+ line: 1,
351
+ column: 1,
351
352
  )
352
353
  expect(tokens[1]).to have_attributes(
353
- :type => :VARIABLE,
354
- :value => 'foo',
355
- :line => 1,
356
- :column => 4
354
+ type: :VARIABLE,
355
+ value: 'foo',
356
+ line: 1,
357
+ column: 4,
357
358
  )
358
359
  expect(tokens[2]).to have_attributes(
359
- :type => :LBRACK,
360
- :value => '[',
361
- :line => 1,
362
- :column => 7
360
+ type: :LBRACK,
361
+ value: '[',
362
+ line: 1,
363
+ column: 7,
363
364
  )
364
365
  expect(tokens[3]).to have_attributes(
365
- :type => :NAME,
366
- :value => 'bar',
367
- :line => 1,
368
- :column => 8
366
+ type: :NAME,
367
+ value: 'bar',
368
+ line: 1,
369
+ column: 8,
369
370
  )
370
371
  expect(tokens[4]).to have_attributes(
371
- :type => :RBRACK,
372
- :value => ']',
373
- :line => 1,
374
- :column => 11
372
+ type: :RBRACK,
373
+ value: ']',
374
+ line: 1,
375
+ column: 11,
375
376
  )
376
377
  expect(tokens[5]).to have_attributes(
377
- :type => :LBRACK,
378
- :value => '[',
379
- :line => 1,
380
- :column => 12
378
+ type: :LBRACK,
379
+ value: '[',
380
+ line: 1,
381
+ column: 12,
381
382
  )
382
383
  expect(tokens[6]).to have_attributes(
383
- :type => :NAME,
384
- :value => 'baz',
385
- :line => 1,
386
- :column => 13
384
+ type: :NAME,
385
+ value: 'baz',
386
+ line: 1,
387
+ column: 13,
387
388
  )
388
389
  expect(tokens[7]).to have_attributes(
389
- :type => :RBRACK,
390
- :value => ']',
391
- :line => 1,
392
- :column => 16
390
+ type: :RBRACK,
391
+ value: ']',
392
+ line: 1,
393
+ column: 16,
393
394
  )
394
395
  expect(tokens[8]).to have_attributes(
395
- :type => :DQPOST,
396
- :value => '',
397
- :line => 1,
398
- :column => 17
396
+ type: :DQPOST,
397
+ value: '',
398
+ line: 1,
399
+ column: 17,
399
400
  )
400
401
  end
401
402
 
@@ -419,34 +420,34 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
419
420
  expect(tokens).to have(5).tokens
420
421
 
421
422
  expect(tokens[0]).to have_attributes(
422
- :type => :DQPRE,
423
- :value => '',
424
- :line => 1,
425
- :column => 1
423
+ type: :DQPRE,
424
+ value: '',
425
+ line: 1,
426
+ column: 1,
426
427
  )
427
428
  expect(tokens[1]).to have_attributes(
428
- :type => :VARIABLE,
429
- :value => 'foo',
430
- :line => 1,
431
- :column => 4
429
+ type: :VARIABLE,
430
+ value: 'foo',
431
+ line: 1,
432
+ column: 4,
432
433
  )
433
434
  expect(tokens[2]).to have_attributes(
434
- :type => :DQMID,
435
- :value => '',
436
- :line => 1,
437
- :column => 7
435
+ type: :DQMID,
436
+ value: '',
437
+ line: 1,
438
+ column: 7,
438
439
  )
439
440
  expect(tokens[3]).to have_attributes(
440
- :type => :VARIABLE,
441
- :value => 'bar',
442
- :line => 1,
443
- :column => 10
441
+ type: :VARIABLE,
442
+ value: 'bar',
443
+ line: 1,
444
+ column: 10,
444
445
  )
445
446
  expect(tokens[4]).to have_attributes(
446
- :type => :DQPOST,
447
- :value => '',
448
- :line => 1,
449
- :column => 13
447
+ type: :DQPOST,
448
+ value: '',
449
+ line: 1,
450
+ column: 13,
450
451
  )
451
452
  end
452
453
 
@@ -468,22 +469,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
468
469
  expect(tokens).to have(3).tokens
469
470
 
470
471
  expect(tokens[0]).to have_attributes(
471
- :type => :DQPRE,
472
- :value => '',
473
- :line => 1,
474
- :column => 1
472
+ type: :DQPRE,
473
+ value: '',
474
+ line: 1,
475
+ column: 1,
475
476
  )
476
477
  expect(tokens[1]).to have_attributes(
477
- :type => :UNENC_VARIABLE,
478
- :value => 'foo',
479
- :line => 1,
480
- :column => 2
478
+ type: :UNENC_VARIABLE,
479
+ value: 'foo',
480
+ line: 1,
481
+ column: 2,
481
482
  )
482
483
  expect(tokens[2]).to have_attributes(
483
- :type => :DQPOST,
484
- :value => '',
485
- :line => 1,
486
- :column => 6
484
+ type: :DQPOST,
485
+ value: '',
486
+ line: 1,
487
+ column: 6,
487
488
  )
488
489
  end
489
490
 
@@ -505,22 +506,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
505
506
  expect(tokens).to have(3).tokens
506
507
 
507
508
  expect(tokens[0]).to have_attributes(
508
- :type => :DQPRE,
509
- :value => 'string with ',
510
- :line => 1,
511
- :column => 1
509
+ type: :DQPRE,
510
+ value: 'string with ',
511
+ line: 1,
512
+ column: 1,
512
513
  )
513
514
  expect(tokens[1]).to have_attributes(
514
- :type => :SSTRING,
515
- :value => 'a nested single quoted string',
516
- :line => 1,
517
- :column => 16
515
+ type: :SSTRING,
516
+ value: 'a nested single quoted string',
517
+ line: 1,
518
+ column: 16,
518
519
  )
519
520
  expect(tokens[2]).to have_attributes(
520
- :type => :DQPOST,
521
- :value => ' inside it',
522
- :line => 1,
523
- :column => 47
521
+ type: :DQPOST,
522
+ value: ' inside it',
523
+ line: 1,
524
+ column: 47,
524
525
  )
525
526
  end
526
527
 
@@ -542,58 +543,58 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
542
543
  expect(tokens).to have(9).tokens
543
544
 
544
545
  expect(tokens[0]).to have_attributes(
545
- :type => :DQPRE,
546
- :value => 'string with ',
547
- :line => 1,
548
- :column => 1
546
+ type: :DQPRE,
547
+ value: 'string with ',
548
+ line: 1,
549
+ column: 1,
549
550
  )
550
551
  expect(tokens[1]).to have_attributes(
551
- :type => :LPAREN,
552
- :value => '(',
553
- :line => 1,
554
- :column => 16
552
+ type: :LPAREN,
553
+ value: '(',
554
+ line: 1,
555
+ column: 16,
555
556
  )
556
557
  expect(tokens[2]).to have_attributes(
557
- :type => :NUMBER,
558
- :value => '3',
559
- :line => 1,
560
- :column => 17
558
+ type: :NUMBER,
559
+ value: '3',
560
+ line: 1,
561
+ column: 17,
561
562
  )
562
563
  expect(tokens[3]).to have_attributes(
563
- :type => :PLUS,
564
- :value => '+',
565
- :line => 1,
566
- :column => 18
564
+ type: :PLUS,
565
+ value: '+',
566
+ line: 1,
567
+ column: 18,
567
568
  )
568
569
  expect(tokens[4]).to have_attributes(
569
- :type => :NUMBER,
570
- :value => '5',
571
- :line => 1,
572
- :column => 19
570
+ type: :NUMBER,
571
+ value: '5',
572
+ line: 1,
573
+ column: 19,
573
574
  )
574
575
  expect(tokens[5]).to have_attributes(
575
- :type => :RPAREN,
576
- :value => ')',
577
- :line => 1,
578
- :column => 20
576
+ type: :RPAREN,
577
+ value: ')',
578
+ line: 1,
579
+ column: 20,
579
580
  )
580
581
  expect(tokens[6]).to have_attributes(
581
- :type => :DIV,
582
- :value => '/',
583
- :line => 1,
584
- :column => 21
582
+ type: :DIV,
583
+ value: '/',
584
+ line: 1,
585
+ column: 21,
585
586
  )
586
587
  expect(tokens[7]).to have_attributes(
587
- :type => :NUMBER,
588
- :value => '4',
589
- :line => 1,
590
- :column => 22
588
+ type: :NUMBER,
589
+ value: '4',
590
+ line: 1,
591
+ column: 22,
591
592
  )
592
593
  expect(tokens[8]).to have_attributes(
593
- :type => :DQPOST,
594
- :value => ' nested math',
595
- :line => 1,
596
- :column => 23
594
+ type: :DQPOST,
595
+ value: ' nested math',
596
+ line: 1,
597
+ column: 23,
597
598
  )
598
599
  end
599
600
 
@@ -615,52 +616,52 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
615
616
  expect(tokens).to have(8).tokens
616
617
 
617
618
  expect(tokens[0]).to have_attributes(
618
- :type => :DQPRE,
619
- :value => 'string with ',
620
- :line => 1,
621
- :column => 1
619
+ type: :DQPRE,
620
+ value: 'string with ',
621
+ line: 1,
622
+ column: 1,
622
623
  )
623
624
  expect(tokens[1]).to have_attributes(
624
- :type => :LBRACK,
625
- :value => '[',
626
- :line => 1,
627
- :column => 16
625
+ type: :LBRACK,
626
+ value: '[',
627
+ line: 1,
628
+ column: 16,
628
629
  )
629
630
  expect(tokens[2]).to have_attributes(
630
- :type => :SSTRING,
631
- :value => 'an array ',
632
- :line => 1,
633
- :column => 17
631
+ type: :SSTRING,
632
+ value: 'an array ',
633
+ line: 1,
634
+ column: 17,
634
635
  )
635
636
  expect(tokens[3]).to have_attributes(
636
- :type => :COMMA,
637
- :value => ',',
638
- :line => 1,
639
- :column => 28
637
+ type: :COMMA,
638
+ value: ',',
639
+ line: 1,
640
+ column: 28,
640
641
  )
641
642
  expect(tokens[4]).to have_attributes(
642
- :type => :WHITESPACE,
643
- :value => ' ',
644
- :line => 1,
645
- :column => 29
643
+ type: :WHITESPACE,
644
+ value: ' ',
645
+ line: 1,
646
+ column: 29,
646
647
  )
647
648
  expect(tokens[5]).to have_attributes(
648
- :type => :VARIABLE,
649
- :value => 'v2',
650
- :line => 1,
651
- :column => 30
649
+ type: :VARIABLE,
650
+ value: 'v2',
651
+ line: 1,
652
+ column: 30,
652
653
  )
653
654
  expect(tokens[6]).to have_attributes(
654
- :type => :RBRACK,
655
- :value => ']',
656
- :line => 1,
657
- :column => 33
655
+ type: :RBRACK,
656
+ value: ']',
657
+ line: 1,
658
+ column: 33,
658
659
  )
659
660
  expect(tokens[7]).to have_attributes(
660
- :type => :DQPOST,
661
- :value => ' in it',
662
- :line => 1,
663
- :column => 34
661
+ type: :DQPOST,
662
+ value: ' in it',
663
+ line: 1,
664
+ column: 34,
664
665
  )
665
666
  end
666
667
 
@@ -684,34 +685,34 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
684
685
  expect(tokens).to have(5).tokens
685
686
 
686
687
  expect(tokens[0]).to have_attributes(
687
- :type => :DQPRE,
688
- :value => '',
689
- :line => 1,
690
- :column => 1
688
+ type: :DQPRE,
689
+ value: '',
690
+ line: 1,
691
+ column: 1,
691
692
  )
692
693
  expect(tokens[1]).to have_attributes(
693
- :type => :UNENC_VARIABLE,
694
- :value => 'foo',
695
- :line => 1,
696
- :column => 2
694
+ type: :UNENC_VARIABLE,
695
+ value: 'foo',
696
+ line: 1,
697
+ column: 2,
697
698
  )
698
699
  expect(tokens[2]).to have_attributes(
699
- :type => :DQMID,
700
- :value => '',
701
- :line => 1,
702
- :column => 6
700
+ type: :DQMID,
701
+ value: '',
702
+ line: 1,
703
+ column: 6,
703
704
  )
704
705
  expect(tokens[3]).to have_attributes(
705
- :type => :UNENC_VARIABLE,
706
- :value => 'bar',
707
- :line => 1,
708
- :column => 6
706
+ type: :UNENC_VARIABLE,
707
+ value: 'bar',
708
+ line: 1,
709
+ column: 6,
709
710
  )
710
711
  expect(tokens[4]).to have_attributes(
711
- :type => :DQPOST,
712
- :value => '',
713
- :line => 1,
714
- :column => 10
712
+ type: :DQPOST,
713
+ value: '',
714
+ line: 1,
715
+ column: 10,
715
716
  )
716
717
  end
717
718
 
@@ -733,22 +734,22 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
733
734
  expect(tokens).to have(3).tokens
734
735
 
735
736
  expect(tokens[0]).to have_attributes(
736
- :type => :DQPRE,
737
- :value => 'foo',
738
- :line => 1,
739
- :column => 1
737
+ type: :DQPRE,
738
+ value: 'foo',
739
+ line: 1,
740
+ column: 1,
740
741
  )
741
742
  expect(tokens[1]).to have_attributes(
742
- :type => :UNENC_VARIABLE,
743
- :value => 'bar',
744
- :line => 1,
745
- :column => 5
743
+ type: :UNENC_VARIABLE,
744
+ value: 'bar',
745
+ line: 1,
746
+ column: 5,
746
747
  )
747
748
  expect(tokens[2]).to have_attributes(
748
- :type => :DQPOST,
749
- :value => '$',
750
- :line => 1,
751
- :column => 9
749
+ type: :DQPOST,
750
+ value: '$',
751
+ line: 1,
752
+ column: 9,
752
753
  )
753
754
  end
754
755
 
@@ -772,94 +773,94 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
772
773
  expect(tokens).to have(15).tokens
773
774
 
774
775
  expect(tokens[0]).to have_attributes(
775
- :type => :DQPRE,
776
- :value => '',
777
- :line => 1,
778
- :column => 1
776
+ type: :DQPRE,
777
+ value: '',
778
+ line: 1,
779
+ column: 1,
779
780
  )
780
781
  expect(tokens[1]).to have_attributes(
781
- :type => :VARIABLE,
782
- :value => 'key',
783
- :line => 1,
784
- :column => 4
782
+ type: :VARIABLE,
783
+ value: 'key',
784
+ line: 1,
785
+ column: 4,
785
786
  )
786
787
  expect(tokens[2]).to have_attributes(
787
- :type => :DQMID,
788
- :value => ' ',
789
- :line => 1,
790
- :column => 7
788
+ type: :DQMID,
789
+ value: ' ',
790
+ line: 1,
791
+ column: 7,
791
792
  )
792
793
  expect(tokens[3]).to have_attributes(
793
- :type => :FUNCTION_NAME,
794
- :value => 'flatten',
795
- :line => 1,
796
- :column => 11
794
+ type: :FUNCTION_NAME,
795
+ value: 'flatten',
796
+ line: 1,
797
+ column: 11,
797
798
  )
798
799
  expect(tokens[4]).to have_attributes(
799
- :type => :LPAREN,
800
- :value => '(',
801
- :line => 1,
802
- :column => 18
800
+ type: :LPAREN,
801
+ value: '(',
802
+ line: 1,
803
+ column: 18,
803
804
  )
804
805
  expect(tokens[5]).to have_attributes(
805
- :type => :LBRACK,
806
- :value => '[',
807
- :line => 1,
808
- :column => 19
806
+ type: :LBRACK,
807
+ value: '[',
808
+ line: 1,
809
+ column: 19,
809
810
  )
810
811
  expect(tokens[6]).to have_attributes(
811
- :type => :VARIABLE,
812
- :value => 'value',
813
- :line => 1,
814
- :column => 20
812
+ type: :VARIABLE,
813
+ value: 'value',
814
+ line: 1,
815
+ column: 20,
815
816
  )
816
817
  expect(tokens[7]).to have_attributes(
817
- :type => :RBRACK,
818
- :value => ']',
819
- :line => 1,
820
- :column => 26
818
+ type: :RBRACK,
819
+ value: ']',
820
+ line: 1,
821
+ column: 26,
821
822
  )
822
823
  expect(tokens[8]).to have_attributes(
823
- :type => :RPAREN,
824
- :value => ')',
825
- :line => 1,
826
- :column => 27
824
+ type: :RPAREN,
825
+ value: ')',
826
+ line: 1,
827
+ column: 27,
827
828
  )
828
829
  expect(tokens[9]).to have_attributes(
829
- :type => :DOT,
830
- :value => '.',
831
- :line => 1,
832
- :column => 28
830
+ type: :DOT,
831
+ value: '.',
832
+ line: 1,
833
+ column: 28,
833
834
  )
834
835
  expect(tokens[10]).to have_attributes(
835
- :type => :FUNCTION_NAME,
836
- :value => 'join',
837
- :line => 1,
838
- :column => 29
836
+ type: :FUNCTION_NAME,
837
+ value: 'join',
838
+ line: 1,
839
+ column: 29,
839
840
  )
840
841
  expect(tokens[11]).to have_attributes(
841
- :type => :LPAREN,
842
- :value => '(',
843
- :line => 1,
844
- :column => 33
842
+ type: :LPAREN,
843
+ value: '(',
844
+ line: 1,
845
+ column: 33,
845
846
  )
846
847
  expect(tokens[12]).to have_attributes(
847
- :type => :STRING,
848
- :value => '\nkey ',
849
- :line => 1,
850
- :column => 34
848
+ type: :STRING,
849
+ value: '\nkey ',
850
+ line: 1,
851
+ column: 34,
851
852
  )
852
853
  expect(tokens[13]).to have_attributes(
853
- :type => :RPAREN,
854
- :value => ')',
855
- :line => 1,
856
- :column => 42
854
+ type: :RPAREN,
855
+ value: ')',
856
+ line: 1,
857
+ column: 42,
857
858
  )
858
859
  expect(tokens[14]).to have_attributes(
859
- :type => :DQPOST,
860
- :value => '',
861
- :line => 1,
862
- :column => 43
860
+ type: :DQPOST,
861
+ value: '',
862
+ line: 1,
863
+ column: 43,
863
864
  )
864
865
  end
865
866
 
@@ -883,94 +884,94 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
883
884
  expect(tokens).to have(15).tokens
884
885
 
885
886
  expect(tokens[0]).to have_attributes(
886
- :type => :DQPRE,
887
- :value => '',
888
- :line => 1,
889
- :column => 1
887
+ type: :DQPRE,
888
+ value: '',
889
+ line: 1,
890
+ column: 1,
890
891
  )
891
892
  expect(tokens[1]).to have_attributes(
892
- :type => :VARIABLE,
893
- :value => 'facts',
894
- :line => 1,
895
- :column => 4
893
+ type: :VARIABLE,
894
+ value: 'facts',
895
+ line: 1,
896
+ column: 4,
896
897
  )
897
898
  expect(tokens[2]).to have_attributes(
898
- :type => :LBRACK,
899
- :value => '[',
900
- :line => 1,
901
- :column => 9
899
+ type: :LBRACK,
900
+ value: '[',
901
+ line: 1,
902
+ column: 9,
902
903
  )
903
904
  expect(tokens[3]).to have_attributes(
904
- :type => :DQPRE,
905
- :value => 'network_',
906
- :line => 1,
907
- :column => 10
905
+ type: :DQPRE,
906
+ value: 'network_',
907
+ line: 1,
908
+ column: 10,
908
909
  )
909
910
  expect(tokens[4]).to have_attributes(
910
- :type => :VARIABLE,
911
- :value => 'iface',
912
- :line => 1,
913
- :column => 21
911
+ type: :VARIABLE,
912
+ value: 'iface',
913
+ line: 1,
914
+ column: 21,
914
915
  )
915
916
  expect(tokens[5]).to have_attributes(
916
- :type => :DQPOST,
917
- :value => '',
918
- :line => 1,
919
- :column => 26
917
+ type: :DQPOST,
918
+ value: '',
919
+ line: 1,
920
+ column: 26,
920
921
  )
921
922
  expect(tokens[6]).to have_attributes(
922
- :type => :RBRACK,
923
- :value => ']',
924
- :line => 1,
925
- :column => 28
923
+ type: :RBRACK,
924
+ value: ']',
925
+ line: 1,
926
+ column: 28,
926
927
  )
927
928
  expect(tokens[7]).to have_attributes(
928
- :type => :DQMID,
929
- :value => '/',
930
- :line => 1,
931
- :column => 29
929
+ type: :DQMID,
930
+ value: '/',
931
+ line: 1,
932
+ column: 29,
932
933
  )
933
934
  expect(tokens[8]).to have_attributes(
934
- :type => :VARIABLE,
935
- :value => 'facts',
936
- :line => 1,
937
- :column => 33
935
+ type: :VARIABLE,
936
+ value: 'facts',
937
+ line: 1,
938
+ column: 33,
938
939
  )
939
940
  expect(tokens[9]).to have_attributes(
940
- :type => :LBRACK,
941
- :value => '[',
942
- :line => 1,
943
- :column => 38
941
+ type: :LBRACK,
942
+ value: '[',
943
+ line: 1,
944
+ column: 38,
944
945
  )
945
946
  expect(tokens[10]).to have_attributes(
946
- :type => :DQPRE,
947
- :value => 'netmask_',
948
- :line => 1,
949
- :column => 39
947
+ type: :DQPRE,
948
+ value: 'netmask_',
949
+ line: 1,
950
+ column: 39,
950
951
  )
951
952
  expect(tokens[11]).to have_attributes(
952
- :type => :VARIABLE,
953
- :value => 'iface',
954
- :line => 1,
955
- :column => 50
953
+ type: :VARIABLE,
954
+ value: 'iface',
955
+ line: 1,
956
+ column: 50,
956
957
  )
957
958
  expect(tokens[12]).to have_attributes(
958
- :type => :DQPOST,
959
- :value => '',
960
- :line => 1,
961
- :column => 55
959
+ type: :DQPOST,
960
+ value: '',
961
+ line: 1,
962
+ column: 55,
962
963
  )
963
964
  expect(tokens[13]).to have_attributes(
964
- :type => :RBRACK,
965
- :value => ']',
966
- :line => 1,
967
- :column => 57
965
+ type: :RBRACK,
966
+ value: ']',
967
+ line: 1,
968
+ column: 57,
968
969
  )
969
970
  expect(tokens[14]).to have_attributes(
970
- :type => :DQPOST,
971
- :value => '',
972
- :line => 1,
973
- :column => 58
971
+ type: :DQPOST,
972
+ value: '',
973
+ line: 1,
974
+ column: 58,
974
975
  )
975
976
  end
976
977
 
@@ -992,112 +993,112 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
992
993
  expect(tokens).to have(18).tokens
993
994
 
994
995
  expect(tokens[0]).to have_attributes(
995
- :type => :DQPRE,
996
- :value => '',
997
- :line => 1,
998
- :column => 1
996
+ type: :DQPRE,
997
+ value: '',
998
+ line: 1,
999
+ column: 1,
999
1000
  )
1000
1001
  expect(tokens[1]).to have_attributes(
1001
- :type => :VARIABLE,
1002
- :value => 'foo',
1003
- :line => 1,
1004
- :column => 4
1002
+ type: :VARIABLE,
1003
+ value: 'foo',
1004
+ line: 1,
1005
+ column: 4,
1005
1006
  )
1006
1007
  expect(tokens[2]).to have_attributes(
1007
- :type => :DOT,
1008
- :value => '.',
1009
- :line => 1,
1010
- :column => 8
1008
+ type: :DOT,
1009
+ value: '.',
1010
+ line: 1,
1011
+ column: 8,
1011
1012
  )
1012
1013
  expect(tokens[3]).to have_attributes(
1013
- :type => :NAME,
1014
- :value => 'map',
1015
- :line => 1,
1016
- :column => 9
1014
+ type: :NAME,
1015
+ value: 'map',
1016
+ line: 1,
1017
+ column: 9,
1017
1018
  )
1018
1019
  expect(tokens[4]).to have_attributes(
1019
- :type => :WHITESPACE,
1020
- :value => ' ',
1021
- :line => 1,
1022
- :column => 12
1020
+ type: :WHITESPACE,
1021
+ value: ' ',
1022
+ line: 1,
1023
+ column: 12,
1023
1024
  )
1024
1025
  expect(tokens[5]).to have_attributes(
1025
- :type => :PIPE,
1026
- :value => '|',
1027
- :line => 1,
1028
- :column => 13
1026
+ type: :PIPE,
1027
+ value: '|',
1028
+ line: 1,
1029
+ column: 13,
1029
1030
  )
1030
1031
  expect(tokens[6]).to have_attributes(
1031
- :type => :VARIABLE,
1032
- :value => 'bar',
1033
- :line => 1,
1034
- :column => 14
1032
+ type: :VARIABLE,
1033
+ value: 'bar',
1034
+ line: 1,
1035
+ column: 14,
1035
1036
  )
1036
1037
  expect(tokens[7]).to have_attributes(
1037
- :type => :PIPE,
1038
- :value => '|',
1039
- :line => 1,
1040
- :column => 18
1038
+ type: :PIPE,
1039
+ value: '|',
1040
+ line: 1,
1041
+ column: 18,
1041
1042
  )
1042
1043
  expect(tokens[8]).to have_attributes(
1043
- :type => :WHITESPACE,
1044
- :value => ' ',
1045
- :line => 1,
1046
- :column => 19
1044
+ type: :WHITESPACE,
1045
+ value: ' ',
1046
+ line: 1,
1047
+ column: 19,
1047
1048
  )
1048
1049
  expect(tokens[9]).to have_attributes(
1049
- :type => :LBRACE,
1050
- :value => '{',
1051
- :line => 1,
1052
- :column => 20
1050
+ type: :LBRACE,
1051
+ value: '{',
1052
+ line: 1,
1053
+ column: 20,
1053
1054
  )
1054
1055
  expect(tokens[10]).to have_attributes(
1055
- :type => :WHITESPACE,
1056
- :value => ' ',
1057
- :line => 1,
1058
- :column => 21
1056
+ type: :WHITESPACE,
1057
+ value: ' ',
1058
+ line: 1,
1059
+ column: 21,
1059
1060
  )
1060
1061
  expect(tokens[11]).to have_attributes(
1061
- :type => :FUNCTION_NAME,
1062
- :value => 'something',
1063
- :line => 1,
1064
- :column => 22
1062
+ type: :FUNCTION_NAME,
1063
+ value: 'something',
1064
+ line: 1,
1065
+ column: 22,
1065
1066
  )
1066
1067
  expect(tokens[12]).to have_attributes(
1067
- :type => :LPAREN,
1068
- :value => '(',
1069
- :line => 1,
1070
- :column => 31
1068
+ type: :LPAREN,
1069
+ value: '(',
1070
+ line: 1,
1071
+ column: 31,
1071
1072
  )
1072
1073
  expect(tokens[13]).to have_attributes(
1073
- :type => :VARIABLE,
1074
- :value => 'bar',
1075
- :line => 1,
1076
- :column => 32
1074
+ type: :VARIABLE,
1075
+ value: 'bar',
1076
+ line: 1,
1077
+ column: 32,
1077
1078
  )
1078
1079
  expect(tokens[14]).to have_attributes(
1079
- :type => :RPAREN,
1080
- :value => ')',
1081
- :line => 1,
1082
- :column => 36
1080
+ type: :RPAREN,
1081
+ value: ')',
1082
+ line: 1,
1083
+ column: 36,
1083
1084
  )
1084
1085
  expect(tokens[15]).to have_attributes(
1085
- :type => :WHITESPACE,
1086
- :value => ' ',
1087
- :line => 1,
1088
- :column => 37
1086
+ type: :WHITESPACE,
1087
+ value: ' ',
1088
+ line: 1,
1089
+ column: 37,
1089
1090
  )
1090
1091
  expect(tokens[16]).to have_attributes(
1091
- :type => :RBRACE,
1092
- :value => '}',
1093
- :line => 1,
1094
- :column => 38
1092
+ type: :RBRACE,
1093
+ value: '}',
1094
+ line: 1,
1095
+ column: 38,
1095
1096
  )
1096
1097
  expect(tokens[17]).to have_attributes(
1097
- :type => :DQPOST,
1098
- :value => '',
1099
- :line => 1,
1100
- :column => 39
1098
+ type: :DQPOST,
1099
+ value: '',
1100
+ line: 1,
1101
+ column: 39,
1101
1102
  )
1102
1103
  end
1103
1104
 
@@ -1108,7 +1109,7 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
  end

  context ':STRING / :DQ' do
- it 'should handle a string with newline characters' do
+ it 'handles a string with newline characters' do
  # rubocop:disable Layout/TrailingWhitespace
  manifest = <<END
  exec {
@@ -1121,7 +1122,7 @@ describe PuppetLint::Lexer do # rubocop:disable Metrics/BlockLength
  }
  END
  # rubocop:enable Layout/TrailingWhitespace
- tokens = @lexer.tokenise(manifest)
+ tokens = lexer.tokenise(manifest)

  expect(tokens.length).to eq(34)

@@ -1263,58 +1264,40 @@ END
  expect(tokens[33].column).to eq(4)
  end

- it 'should calculate the column number correctly after an enclosed variable' do
- token = @lexer.tokenise(' "${foo}" =>').last
+ it 'calculates the column number correctly after an enclosed variable' do
+ token = lexer.tokenise(' "${foo}" =>').last
  expect(token.type).to eq(:FARROW)
  expect(token.column).to eq(12)
  end

- it 'should calculate the column number correctly after an enclosed variable starting with a string' do
- token = @lexer.tokenise(' "bar${foo}" =>').last
+ it 'calculates the column number correctly after an enclosed variable starting with a string' do
+ token = lexer.tokenise(' "bar${foo}" =>').last
  expect(token.type).to eq(:FARROW)
  expect(token.column).to eq(15)
  end

- it 'should calculate the column number correctly after an enclosed variable ending with a string' do
- token = @lexer.tokenise(' "${foo}bar" =>').last
+ it 'calculates the column number correctly after an enclosed variable ending with a string' do
+ token = lexer.tokenise(' "${foo}bar" =>').last
  expect(token.type).to eq(:FARROW)
  expect(token.column).to eq(15)
  end

- it 'should calculate the column number correctly after an enclosed variable surround by a string' do
- token = @lexer.tokenise(' "foo${bar}baz" =>').last
+ it 'calculates the column number correctly after an enclosed variable surround by a string' do
+ token = lexer.tokenise(' "foo${bar}baz" =>').last
  expect(token.type).to eq(:FARROW)
  expect(token.column).to eq(18)
  end

- it 'should not enclose variable with a chained function call' do
+ it 'does not enclose variable with a chained function call' do
  manifest = '"This is ${a.test}"'
- tokens = @lexer.tokenise(manifest)
+ tokens = lexer.tokenise(manifest)
  expect(tokens.map(&:to_manifest).join('')).to eq(manifest)
  end
  end

- %w[
- case
- class
- default
- define
- import
- if
- elsif
- else
- inherits
- node
- and
- or
- undef
- true
- false
- in
- unless
- ].each do |keyword|
- it "should handle '#{keyword}' as a keyword" do
- token = @lexer.tokenise(keyword).first
+ ['case', 'class', 'default', 'define', 'import', 'if', 'elsif', 'else', 'inherits', 'node', 'and', 'or', 'undef', 'true', 'false', 'in', 'unless'].each do |keyword|
+ it "handles '#{keyword}' as a keyword" do
+ token = lexer.tokenise(keyword).first
  expect(token.type).to eq(keyword.upcase.to_sym)
  expect(token.value).to eq(keyword)
  end
@@ -1367,40 +1350,40 @@ END
1367
1350
  [:NEWLINE, "\n"],
1368
1351
  [:NEWLINE, "\r\n"],
1369
1352
  ].each do |name, string|
1370
- it "should have a token named '#{name}'" do
1371
- token = @lexer.tokenise(string).first
1353
+ it "has a token named '#{name}'" do
1354
+ token = lexer.tokenise(string).first
1372
1355
  expect(token.type).to eq(name)
1373
1356
  expect(token.value).to eq(string)
1374
1357
  end
1375
1358
  end
1376
1359
 
1377
1360
  context ':TYPE' do
1378
- it 'should match Data Types' do
1379
- token = @lexer.tokenise('Integer').first
1361
+ it 'matches Data Types' do
1362
+ token = lexer.tokenise('Integer').first
1380
1363
  expect(token.type).to eq(:TYPE)
1381
1364
  expect(token.value).to eq('Integer')
1382
1365
  end
1383
1366
 
1384
- it 'should match Catalog Types' do
1385
- token = @lexer.tokenise('Resource').first
1367
+ it 'matches Catalog Types' do
1368
+ token = lexer.tokenise('Resource').first
1386
1369
  expect(token.type).to eq(:TYPE)
1387
1370
  expect(token.value).to eq('Resource')
1388
1371
  end
1389
1372
 
1390
- it 'should match Abstract Types' do
1391
- token = @lexer.tokenise('Collection').first
1373
+ it 'matches Abstract Types' do
1374
+ token = lexer.tokenise('Collection').first
1392
1375
  expect(token.type).to eq(:TYPE)
1393
1376
  expect(token.value).to eq('Collection')
1394
1377
  end
1395
1378
 
1396
1379
  describe 'Platform Types' do
1397
- it 'should match Callable' do
1398
- token = @lexer.tokenise('Callable').first
1380
+ it 'matches Callable' do
1381
+ token = lexer.tokenise('Callable').first
1399
1382
  expect(token.type).to eq(:TYPE)
1400
1383
  expect(token.value).to eq('Callable')
1401
1384
  end
1402
- it 'should match Sensitive' do
1403
- token = @lexer.tokenise('Sensitive').first
1385
+ it 'matches Sensitive' do
1386
+ token = lexer.tokenise('Sensitive').first
1404
1387
  expect(token.type).to eq(:TYPE)
1405
1388
  expect(token.value).to eq('Sensitive')
1406
1389
  end
@@ -1408,7 +1391,7 @@ END
1408
1391
  end
1409
1392
 
1410
1393
  context ':HEREDOC without interpolation' do
1411
- it 'should parse a simple heredoc' do
1394
+ it 'parses a simple heredoc' do
1412
1395
  manifest = <<-END.gsub(%r{^ {6}}, '')
1413
1396
  $str = @(myheredoc)
1414
1397
  SOMETHING
@@ -1416,7 +1399,7 @@ END
1416
1399
  :
1417
1400
  |-myheredoc
1418
1401
  END
1419
- tokens = @lexer.tokenise(manifest)
1402
+ tokens = lexer.tokenise(manifest)
1420
1403
 
1421
1404
  expect(tokens.length).to eq(8)
1422
1405
  expect(tokens[0].type).to eq(:VARIABLE)
@@ -1453,7 +1436,7 @@ END
1453
1436
  expect(tokens[7].column).to eq(14)
1454
1437
  end
1455
1438
 
1456
- it 'should not interpolate the contents of the heredoc' do
1439
+ it 'does not interpolate the contents of the heredoc' do
1457
1440
  manifest = <<-END.gsub(%r{^ {6}}, '')
1458
1441
  $str = @(myheredoc)
1459
1442
  SOMETHING
@@ -1461,7 +1444,7 @@ END
1461
1444
  :
1462
1445
  |-myheredoc
1463
1446
  END
1464
- tokens = @lexer.tokenise(manifest)
1447
+ tokens = lexer.tokenise(manifest)
1465
1448
 
1466
1449
  expect(tokens.length).to eq(8)
1467
1450
  expect(tokens[0].type).to eq(:VARIABLE)
@@ -1499,7 +1482,7 @@ END
1499
1482
  expect(tokens[7].column).to eq(14)
1500
1483
  end
1501
1484
 
1502
- it 'should handle multiple heredoc declarations on a single line' do
1485
+ it 'handles multiple heredoc declarations on a single line' do
1503
1486
  manifest = <<-END.gsub(%r{^ {6}}, '')
1504
1487
  $str = "${@(end1)} ${@(end2)}"
1505
1488
  foo
@@ -1507,7 +1490,7 @@ END
1507
1490
  bar
1508
1491
  |-end2
1509
1492
  END
1510
- tokens = @lexer.tokenise(manifest)
1493
+ tokens = lexer.tokenise(manifest)
1511
1494
 
1512
1495
  expect(tokens.length).to eq(14)
1513
1496
  expect(tokens[0].type).to eq(:VARIABLE)
@@ -1570,7 +1553,7 @@ END
1570
1553
  expect(tokens[13].column).to eq(9)
1571
1554
  end
1572
1555
 
1573
- it 'should handle a heredoc that specifies a syntax' do
1556
+ it 'handles a heredoc that specifies a syntax' do
1574
1557
  manifest = <<-END.gsub(%r{^ {6}}, '')
1575
1558
  $str = @("end":json/)
1576
1559
  {
@@ -1579,7 +1562,7 @@ END
1579
1562
  |-end
1580
1563
  END
1581
1564
 
1582
- tokens = @lexer.tokenise(manifest)
1565
+ tokens = lexer.tokenise(manifest)
1583
1566
 
1584
1567
  expect(tokens.length).to eq(8)
1585
1568
  expect(tokens[0].type).to eq(:VARIABLE)
@@ -1617,13 +1600,13 @@ END
1617
1600
  expect(tokens[7].column).to eq(8)
1618
1601
  end
1619
1602
 
1620
- it 'should handle a heredoc with spaces in the tag' do
1603
+ it 'handles a heredoc with spaces in the tag' do
1621
1604
  manifest = <<-END.gsub(%r{^ {6}}, '')
1622
1605
  $str = @("myheredoc" /)
1623
1606
  foo
1624
1607
  |-myheredoc
1625
1608
  END
1626
- tokens = @lexer.tokenise(manifest)
1609
+ tokens = lexer.tokenise(manifest)
1627
1610
  expect(tokens.length).to eq(8)
1628
1611
 
1629
1612
  expect(tokens[4].type).to eq(:HEREDOC_OPEN)
@@ -1632,13 +1615,13 @@ END
1632
1615
  expect(tokens[6].value).to eq(" foo\n ")
1633
1616
  end
1634
1617
 
1635
- it 'should handle a heredoc with no indentation' do
1618
+ it 'handles a heredoc with no indentation' do
1636
1619
  manifest = <<-END.gsub(%r{^ {6}}, '')
1637
1620
  $str = @(EOT)
1638
1621
  something
1639
1622
  EOT
1640
1623
  END
1641
- tokens = @lexer.tokenise(manifest)
1624
+ tokens = lexer.tokenise(manifest)
1642
1625
 
1643
1626
  expect(tokens.length).to eq(8)
1644
1627
  expect(tokens[4].type).to eq(:HEREDOC_OPEN)
@@ -1649,7 +1632,7 @@ END
1649
1632
  end
1650
1633
 
1651
1634
  context ':HEREDOC with interpolation' do
1652
- it 'should parse a heredoc with no interpolated values as a :HEREDOC' do
1635
+ it 'parses a heredoc with no interpolated values as a :HEREDOC' do
1653
1636
  manifest = <<-END.gsub(%r{^ {6}}, '')
1654
1637
  $str = @("myheredoc"/)
1655
1638
  SOMETHING
@@ -1657,7 +1640,7 @@ END
1657
1640
  :
1658
1641
  |-myheredoc
1659
1642
  END
1660
- tokens = @lexer.tokenise(manifest)
1643
+ tokens = lexer.tokenise(manifest)
1661
1644
 
1662
1645
  expect(tokens[0].type).to eq(:VARIABLE)
1663
1646
  expect(tokens[0].value).to eq('str')
@@ -1694,7 +1677,7 @@ END
1694
1677
  expect(tokens[7].column).to eq(14)
1695
1678
  end
1696
1679
 
1697
- it 'should parse a heredoc with interpolated values' do
1680
+ it 'parses a heredoc with interpolated values' do
1698
1681
  manifest = <<-END.gsub(%r{^ {6}}, '')
1699
1682
  $str = @("myheredoc"/)
1700
1683
  SOMETHING
@@ -1705,7 +1688,7 @@ END
1705
1688
  | myheredoc
1706
1689
  END
1707
1690
 
1708
- tokens = @lexer.tokenise(manifest)
1691
+ tokens = lexer.tokenise(manifest)
1709
1692
  expect(tokens.map(&:to_manifest).join('')).to eq(manifest)
1710
1693
 
1711
1694
  expect(tokens[0].type).to eq(:VARIABLE)
@@ -1758,13 +1741,13 @@ END
1758
1741
  expect(tokens[10].column).to eq(11)
1759
1742
  end
1760
1743
 
1761
- it 'should not remove the unnecessary $ from enclosed variables' do
1744
+ it 'does not remove the unnecessary $ from enclosed variables' do
1762
1745
  manifest = <<-END.gsub(%r{^ {6}}, '')
1763
1746
  $str = @("myheredoc"/)
1764
1747
  ${$myvar}
1765
1748
  |-myheredoc
1766
1749
  END
1767
- tokens = @lexer.tokenise(manifest)
1750
+ tokens = lexer.tokenise(manifest)
1768
1751
 
1769
1752
  expect(tokens.length).to eq(10)
1770
1753
 
@@ -1778,284 +1761,283 @@ END
1778
1761
  end
1779
1762
 
1780
1763
  context ':CLASSREF' do
1781
- it 'should match single capitalised alphanumeric term' do
1782
- token = @lexer.tokenise('One').first
1764
+ it 'matches single capitalised alphanumeric term' do
1765
+ token = lexer.tokenise('One').first
1783
1766
  expect(token.type).to eq(:CLASSREF)
1784
1767
  expect(token.value).to eq('One')
1785
1768
  end
1786
1769
 
1787
- it 'should match two capitalised alphanumeric terms sep by ::' do
1788
- token = @lexer.tokenise('One::Two').first
1770
+ it 'matches two capitalised alphanumeric terms sep by ::' do
1771
+ token = lexer.tokenise('One::Two').first
1789
1772
  expect(token.type).to eq(:CLASSREF)
1790
1773
  expect(token.value).to eq('One::Two')
1791
1774
  end
1792
1775
 
1793
- it 'should match many capitalised alphanumeric terms sep by ::' do
1794
- token = @lexer.tokenise('One::Two::Three::Four::Five').first
1776
+ it 'matches many capitalised alphanumeric terms sep by ::' do
1777
+ token = lexer.tokenise('One::Two::Three::Four::Five').first
1795
1778
  expect(token.type).to eq(:CLASSREF)
1796
1779
  expect(token.value).to eq('One::Two::Three::Four::Five')
1797
1780
  end
1798
1781
 
1799
- it 'should match capitalised terms prefixed by ::' do
1800
- token = @lexer.tokenise('::One').first
1782
+ it 'matches capitalised terms prefixed by ::' do
1783
+ token = lexer.tokenise('::One').first
1801
1784
  expect(token.type).to eq(:CLASSREF)
1802
1785
  expect(token.value).to eq('::One')
1803
1786
  end
1804
1787
 
1805
- it 'should match terms that start with Types' do
1806
- token = @lexer.tokenise('Regexp_foo').first
1788
+ it 'matches terms that start with Types' do
1789
+ token = lexer.tokenise('Regexp_foo').first
1807
1790
  expect(token.type).to eq(:CLASSREF)
1808
1791
  expect(token.value).to eq('Regexp_foo')
1809
1792
  end
1810
1793
  end
1811
1794
 
1812
1795
  context ':NAME' do
1813
- it 'should match lowercase alphanumeric terms' do
1814
- token = @lexer.tokenise('one-two').first
1796
+ it 'matches lowercase alphanumeric terms' do
1797
+ token = lexer.tokenise('one-two').first
1815
1798
  expect(token.type).to eq(:NAME)
1816
1799
  expect(token.value).to eq('one-two')
1817
1800
  end
1818
1801
 
1819
- it 'should match lowercase alphanumeric terms sep by ::' do
1820
- token = @lexer.tokenise('one::two').first
1802
+ it 'matches lowercase alphanumeric terms sep by ::' do
1803
+ token = lexer.tokenise('one::two').first
1821
1804
  expect(token.type).to eq(:NAME)
1822
1805
  expect(token.value).to eq('one::two')
1823
1806
  end
1824
1807
 
1825
- it 'should match many lowercase alphanumeric terms sep by ::' do
1826
- token = @lexer.tokenise('one::two::three::four::five').first
1808
+ it 'matches many lowercase alphanumeric terms sep by ::' do
1809
+ token = lexer.tokenise('one::two::three::four::five').first
1827
1810
  expect(token.type).to eq(:NAME)
1828
1811
  expect(token.value).to eq('one::two::three::four::five')
1829
1812
  end
1830
1813
 
1831
- it 'should match lowercase alphanumeric terms prefixed by ::' do
1832
- token = @lexer.tokenise('::1one::2two::3three').first
1814
+ it 'matches lowercase alphanumeric terms prefixed by ::' do
1815
+ token = lexer.tokenise('::1one::2two::3three').first
1833
1816
  expect(token.type).to eq(:NAME)
1834
1817
  expect(token.value).to eq('::1one::2two::3three')
1835
1818
  end
1836
1819
 
1837
- it 'should match barewords beginning with an underscore' do
1838
- token = @lexer.tokenise('_bareword').first
1820
+ it 'matches barewords beginning with an underscore' do
1821
+ token = lexer.tokenise('_bareword').first
1839
1822
  expect(token.type).to eq(:NAME)
1840
1823
  expect(token.value).to eq('_bareword')
1841
1824
  end
1842
1825
  end
1843
1826
 
1844
1827
  context ':FUNCTION_NAME' do
- it 'should match when a :NAME is followed by a :LPAREN' do
- token = @lexer.tokenise('my_function(').first
+ it 'matches when a :NAME is followed by a :LPAREN' do
+ token = lexer.tokenise('my_function(').first
  expect(token.type).to eq(:FUNCTION_NAME)
  expect(token.value).to eq('my_function')
  end
  end

  context ':NUMBER' do
- it 'should match numeric terms' do
- token = @lexer.tokenise('1234567890').first
+ it 'matches numeric terms' do
+ token = lexer.tokenise('1234567890').first
  expect(token.type).to eq(:NUMBER)
  expect(token.value).to eq('1234567890')
  end

- it 'should match float terms' do
- token = @lexer.tokenise('12345.6789').first
+ it 'matches float terms' do
+ token = lexer.tokenise('12345.6789').first
  expect(token.type).to eq(:NUMBER)
  expect(token.value).to eq('12345.6789')
  end

- it 'should match hexadecimal terms' do
- token = @lexer.tokenise('0xCAFE1029').first
+ it 'matches hexadecimal terms' do
+ token = lexer.tokenise('0xCAFE1029').first
  expect(token.type).to eq(:NUMBER)
  expect(token.value).to eq('0xCAFE1029')
  end

- it 'should match float with exponent terms' do
- token = @lexer.tokenise('10e23').first
- expect(token.type).to eq(:NUMBER)
- expect(token.value).to eq('10e23')
+ [
+ '10e23',
+ '1.234e5',
+ ].each do |f|
+ it 'matches float with exponent terms' do
+ token = lexer.tokenise(f).first
+ expect(token.type).to eq(:NUMBER)
+ expect(token.value).to eq(f)
+ end
  end

- it 'should match float with negative exponent terms' do
- token = @lexer.tokenise('10e-23').first
+ it 'matches float with negative exponent terms' do
+ token = lexer.tokenise('10e-23').first
  expect(token.type).to eq(:NUMBER)
  expect(token.value).to eq('10e-23')
  end
-
- it 'should match float with exponent terms' do
- token = @lexer.tokenise('1.234e5').first
- expect(token.type).to eq(:NUMBER)
- expect(token.value).to eq('1.234e5')
- end
  end

  context ':COMMENT' do
- it 'should match everything on a line after #' do
- token = @lexer.tokenise('foo # bar baz')[2]
+ it 'matches everything on a line after #' do
+ token = lexer.tokenise('foo # bar baz')[2]
  expect(token.type).to eq(:COMMENT)
  expect(token.value).to eq(' bar baz')
  end

- it 'should not include DOS line endings in the comment value' do
- tokens = @lexer.tokenise("foo # bar baz\r\n")
+ it 'does not include DOS line endings in the comment value' do
+ tokens = lexer.tokenise("foo # bar baz\r\n")

- expect(tokens[2]).to have_attributes(:type => :COMMENT, :value => ' bar baz')
- expect(tokens[3]).to have_attributes(:type => :NEWLINE, :value => "\r\n")
+ expect(tokens[2]).to have_attributes(type: :COMMENT, value: ' bar baz')
+ expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\r\n")
  end

- it 'should not include Unix line endings in the comment value' do
- tokens = @lexer.tokenise("foo # bar baz\n")
+ it 'does not include Unix line endings in the comment value' do
+ tokens = lexer.tokenise("foo # bar baz\n")

- expect(tokens[2]).to have_attributes(:type => :COMMENT, :value => ' bar baz')
- expect(tokens[3]).to have_attributes(:type => :NEWLINE, :value => "\n")
+ expect(tokens[2]).to have_attributes(type: :COMMENT, value: ' bar baz')
+ expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\n")
  end
  end

  context ':MLCOMMENT' do
- it 'should match comments on a single line' do
- token = @lexer.tokenise('/* foo bar */').first
+ it 'matches comments on a single line' do
+ token = lexer.tokenise('/* foo bar */').first
  expect(token.type).to eq(:MLCOMMENT)
  expect(token.value).to eq('foo bar')
  end

- it 'should match comments on multiple lines' do
- token = @lexer.tokenise("/* foo\n * bar\n*/").first
+ it 'matches comments on multiple lines' do
+ token = lexer.tokenise("/* foo\n * bar\n*/").first
  expect(token.type).to eq(:MLCOMMENT)
  expect(token.value).to eq("foo\n bar\n")
  end
  end

  context ':SLASH_COMMENT' do
- it 'should match everyone on a line after //' do
- token = @lexer.tokenise('foo // bar baz')[2]
+ it 'matches everyone on a line after //' do
+ token = lexer.tokenise('foo // bar baz')[2]
  expect(token.type).to eq(:SLASH_COMMENT)
  expect(token.value).to eq(' bar baz')
  end

- it 'should not include DOS line endings in the comment value' do
- tokens = @lexer.tokenise("foo // bar baz\r\n")
+ it 'does not include DOS line endings in the comment value' do
+ tokens = lexer.tokenise("foo // bar baz\r\n")

- expect(tokens[2]).to have_attributes(:type => :SLASH_COMMENT, :value => ' bar baz')
- expect(tokens[3]).to have_attributes(:type => :NEWLINE, :value => "\r\n")
+ expect(tokens[2]).to have_attributes(type: :SLASH_COMMENT, value: ' bar baz')
+ expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\r\n")
  end

- it 'should not include Unix line endings in the comment value' do
- tokens = @lexer.tokenise("foo // bar baz\n")
+ it 'does not include Unix line endings in the comment value' do
+ tokens = lexer.tokenise("foo // bar baz\n")

- expect(tokens[2]).to have_attributes(:type => :SLASH_COMMENT, :value => ' bar baz')
- expect(tokens[3]).to have_attributes(:type => :NEWLINE, :value => "\n")
+ expect(tokens[2]).to have_attributes(type: :SLASH_COMMENT, value: ' bar baz')
+ expect(tokens[3]).to have_attributes(type: :NEWLINE, value: "\n")
  end
  end

  context ':SSTRING' do
- it 'should match a single quoted string' do
- token = @lexer.tokenise("'single quoted string'").first
+ it 'matches a single quoted string' do
+ token = lexer.tokenise("'single quoted string'").first
  expect(token.type).to eq(:SSTRING)
  expect(token.value).to eq('single quoted string')
  end

- it "should match a single quoted string with an escaped '" do
- token = @lexer.tokenise(%q('single quoted string with "\\'"')).first
+ it "matches a single quoted string with an escaped '" do
+ token = lexer.tokenise(%q('single quoted string with "\\'"')).first
  expect(token.type).to eq(:SSTRING)
  expect(token.value).to eq('single quoted string with "\\\'"')
  end

- it 'should match a single quoted string with an escaped $' do
- token = @lexer.tokenise(%q('single quoted string with "\$"')).first
+ it 'matches a single quoted string with an escaped $' do
+ token = lexer.tokenise(%q('single quoted string with "\$"')).first
  expect(token.type).to eq(:SSTRING)
  expect(token.value).to eq('single quoted string with "\\$"')
  end

- it 'should match a single quoted string with an escaped .' do
- token = @lexer.tokenise(%q('single quoted string with "\."')).first
+ it 'matches a single quoted string with an escaped .' do
+ token = lexer.tokenise(%q('single quoted string with "\."')).first
  expect(token.type).to eq(:SSTRING)
  expect(token.value).to eq('single quoted string with "\\."')
  end

- it 'should match a single quoted string with an escaped \\n' do
- token = @lexer.tokenise(%q('single quoted string with "\n"')).first
+ it 'matches a single quoted string with an escaped \n' do
+ token = lexer.tokenise(%q('single quoted string with "\n"')).first
  expect(token.type).to eq(:SSTRING)
  expect(token.value).to eq('single quoted string with "\\n"')
  end

- it 'should match a single quoted string with an escaped \\' do
- token = @lexer.tokenise(%q('single quoted string with "\\\\"')).first
- expect(token.type).to eq(:SSTRING)
- expect(token.value).to eq('single quoted string with "\\\\"')
- end
-
- it 'should match an empty string' do
- token = @lexer.tokenise("''").first
+ # it 'matches a single quoted string with an escaped \' do
+ # token = lexer.tokenise(%q('single quoted string with "\\\\"')).first
+ # expect(token.type).to eq(:SSTRING)
+ # expect(token.value).to eq('single quoted string with "\\\\"')
+ # end
+ #
+ it 'matches an empty string' do
+ token = lexer.tokenise("''").first
  expect(token.type).to eq(:SSTRING)
  expect(token.value).to eq('')
  end

- it 'should match an empty string ending with \\\\' do
- token = @lexer.tokenise("'foo\\\\'").first
+ it 'matches an empty string ending with \\' do
+ token = lexer.tokenise("'foo\\\\'").first
  expect(token.type).to eq(:SSTRING)
  expect(token.value).to eq(%(foo\\\\))
  end

- it 'should match single quoted string containing a line break' do
- token = @lexer.tokenise("'\n'").first
+ it 'matches single quoted string containing a line break' do
+ token = lexer.tokenise("'\n'").first
  expect(token.type).to eq(:SSTRING)
  expect(token.value).to eq("\n")
  end
  end

  context ':REGEX' do
- it 'should match anything enclosed in //' do
- token = @lexer.tokenise('/this is a regex/').first
+ it 'matches anything enclosed in //' do
+ token = lexer.tokenise('/this is a regex/').first
  expect(token.type).to eq(:REGEX)
  expect(token.value).to eq('this is a regex')
  end

- it 'should match even if there is \n in the regex' do
- token = @lexer.tokenise("/this is a regex,\ntoo/").first
+ it 'matches even if there is \n in the regex' do
+ token = lexer.tokenise("/this is a regex,\ntoo/").first
  expect(token.type).to eq(:REGEX)
  expect(token.value).to eq("this is a regex,\ntoo")
  end

- it 'should not consider \/ to be the end of the regex' do
- token = @lexer.tokenise('/this is \/ a regex/').first
+ it 'does not consider \/ to be the end of the regex' do
+ token = lexer.tokenise('/this is \/ a regex/').first
  expect(token.type).to eq(:REGEX)
  expect(token.value).to eq('this is \\/ a regex')
  end

- it 'should be allowed as a param to a data type' do
- tokens = @lexer.tokenise('Foo[/bar/]')
+ it 'is allowed as a param to a data type' do
+ tokens = lexer.tokenise('Foo[/bar/]')
  expect(tokens[2].type).to eq(:REGEX)
  expect(tokens[2].value).to eq('bar')
  end

- it 'should be allowed as a param to an optional data type' do
- tokens = @lexer.tokenise('Optional[Regexp[/^puppet/]]')
+ it 'is allowed as a param to an optional data type' do
+ tokens = lexer.tokenise('Optional[Regexp[/^puppet/]]')
  expect(tokens[4].type).to eq(:REGEX)
  expect(tokens[4].value).to eq('^puppet')
  end

- it 'should not match chained division' do
- tokens = @lexer.tokenise('$x = $a/$b/$c')
+ it 'does not match chained division' do
+ tokens = lexer.tokenise('$x = $a/$b/$c')
  expect(tokens.select { |r| r.type == :REGEX }).to be_empty
  end

- it 'should properly parse when regex follows an if' do
- tokens = @lexer.tokenise('if /^icinga_service_icon_.*/ in $location_info { }')
+ it 'properlies parse when regex follows an if' do
+ tokens = lexer.tokenise('if /^icinga_service_icon_.*/ in $location_info { }')
  expect(tokens[2].type).to eq(:REGEX)
  end

- it 'should properly parse when a regex follows an elsif' do
- tokens = @lexer.tokenise('if /a/ in $location_info { } elsif /b/ in $location_info { }')
+ it 'properlies parse when a regex follows an elsif' do
+ tokens = lexer.tokenise('if /a/ in $location_info { } elsif /b/ in $location_info { }')
  expect(tokens[2].type).to eq(:REGEX)
  expect(tokens[14].type).to eq(:REGEX)
  end

- it 'should properly parse when a regex is provided as a function argument' do
- tokens = @lexer.tokenise('$somevar = $other_var.match(/([\w\.]+(:\d+)?(\/\w+)?)(:(\w+))?/)')
+ it 'properlies parse when a regex is provided as a function argument' do
+ tokens = lexer.tokenise('$somevar = $other_var.match(/([\w\.]+(:\d+)?(\/\w+)?)(:(\w+))?/)')
  expect(tokens[8].type).to eq(:REGEX)
  expect(tokens[8].value).to eq('([\w\.]+(:\d+)?(\/\w+)?)(:(\w+))?')
  end

- it 'should discriminate between division and regexes' do
- tokens = @lexer.tokenise('if $a/10==0 or $b=~/{}/')
+ it 'discriminates between division and regexes' do
+ tokens = lexer.tokenise('if $a/10==0 or $b=~/{}/')
  expect(tokens[3].type).to eq(:DIV)
  expect(tokens[12].type).to eq(:REGEX)
  expect(tokens[12].value).to eq('{}')
@@ -2063,22 +2045,22 @@ END
  end

  context ':STRING' do
- it 'should parse strings with embedded strings' do
+ it 'parses strings with embedded strings' do
  expect {
- @lexer.tokenise('exec { "/bin/echo \"${environment}\"": }')
- }.to_not raise_error
+ lexer.tokenise('exec { "/bin/echo \"${environment}\"": }')
+ }.not_to raise_error
  end

- it 'should match double quoted string containing a line break' do
- token = @lexer.tokenise(%("\n")).first
+ it 'matches double quoted string containing a line break' do
+ token = lexer.tokenise(%("\n")).first
  expect(token.type).to eq(:STRING)
  expect(token.value).to eq("\n")
  end

- it 'should handle interpolated values that contain double quotes' do
+ it 'handles interpolated values that contain double quotes' do
  manifest = %{"export bar=\\"${join(hiera('test'), "," )}\\""}

- tokens = @lexer.tokenise(manifest)
+ tokens = lexer.tokenise(manifest)
  expect(tokens[0].type).to eq(:DQPRE)
  expect(tokens[0].value).to eq('export bar=\"')
  expect(tokens[1].type).to eq(:FUNCTION_NAME)
@@ -2104,20 +2086,20 @@ END
  end

  context ':WHITESPACE' do
- it 'should parse spaces' do
- token = @lexer.tokenise(' ').first
+ it 'parses spaces' do
+ token = lexer.tokenise(' ').first
  expect(token.type).to eq(:WHITESPACE)
  expect(token.value).to eq(' ')
  end

- it 'should parse tabs' do
- token = @lexer.tokenise("\t").first
+ it 'parses tabs' do
+ token = lexer.tokenise("\t").first
  expect(token.type).to eq(:WHITESPACE)
  expect(token.value).to eq("\t")
  end

- it 'should parse unicode spaces', :unless => RUBY_VERSION == '1.8.7' do
- token = @lexer.tokenise("\xc2\xa0").first
+ it 'parses unicode spaces', unless: RUBY_VERSION == '1.8.7' do
+ token = lexer.tokenise("\xc2\xa0").first
  expect(token.type).to eq(:WHITESPACE)
  expect(token.value).to eq("\xc2\xa0")
  end