rley 0.5.12 → 0.5.13

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 78c5a2a83d1691c6c470f2fb4bf347f7dca44cb6
- data.tar.gz: 9c1f62cdf775e71e33ceecfc99db58298b4e3b82
+ metadata.gz: 457b41c7e35823bc234ddad0419bac1416494790
+ data.tar.gz: a50f2c9df76426c56c3106b2d5ef1dd46b319a98
  SHA512:
- metadata.gz: 662e52aa9aae198f6eb5e9ed1750a1bfa873969ad9bedea4ca7225babef3d9fa648b709c4bef4959432d62cf8e1733d556ce911efec9885a0d59e6d4972c0f50
- data.tar.gz: ab114da248a85b5e78fdb2ba9affca357a52b0cbe45cbaa97b1ad41becf71b0fa89db6e1f4487c0a23c59b9b9b83db5101e511f2b711a0d4f9a324ac2c3f3a2d
+ metadata.gz: db08ac3611d797984b6b196a5de5a4b15b5507b6b4973428ab28f1271b8b609b35b920bf54a883dbeb5d24dc503bf85b6952f5d0ad9c7bae74327e5cf411f0c8
+ data.tar.gz: 2d0b8623dd36c3f8fcfa3d2515eadae0cfc5bfd4934e0d03838b2e5a7b029ca382d6d03b3dd9565b77107b8b49ae96ca9a35e024020c26e61a949d5e1b2b3a85
@@ -1,3 +1,7 @@
+ ### 0.5.13 / 2018-02-09
+ * [CHANGE] File `examples/general/SRL/grammar.rb`: added names to each production rule.
+ * [CHANGE] File `examples/general/SRL/ast_builder.rb`: code refactoring to take advantage of rule naming.
+
  ### 0.5.12 / 2018-02-03
  * [CHANGE] Simple Regex Language is fully supported!...
  * [CHANGE] File `examples/general/SRL/grammar.rb`: added missing rule productions for Simple Regex Language.
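Note on the 0.5.13 refactoring: the long case statement on auto-generated rule names ('srl_0', 'expression_1', ...) is replaced by explicit names attached with .as plus a reflective dispatch to reduce_<name> methods, as the ast_builder.rb diff below shows. The following is a minimal, self-contained Ruby sketch of that pattern written for this summary; it is not the gem's actual code, and the class and handler names are illustrative only.

class MiniASTBuilder
  # aProduction is assumed to respond to #name and #rhs, mirroring the builder below.
  def new_parent_node(aProduction, aRange, theTokens, theChildren)
    method_name = 'reduce_' + aProduction.name
    if respond_to?(method_name, true)
      send(method_name, aProduction, aRange, theTokens, theChildren)
    else
      # Default actions: empty right-hand side => nil, single symbol => pass the child up.
      case aProduction.rhs.size
      when 0 then nil
      when 1 then theChildren[0]
      else raise StandardError, "Don't know production '#{aProduction.name}'"
      end
    end
  end

  private

  # One handler per named rule, e.g. rule('quantifier' => 'ONCE').as 'once'
  def reduce_once(_production, _range, _tokens, _children)
    [1, 1] # stand-in for the builder's multiplicity(1, 1)
  end
end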
@@ -37,248 +37,21 @@ class ASTBuilder < Rley::Parser::ParseTreeBuilder
  # @param theTokens [Array] The input tokens
  # @param theChildren [Array] Children nodes (one per rhs symbol)
  def new_parent_node(aProduction, aRange, theTokens, theChildren)
- node = case aProduction.name
- when 'srl_0' # rule 'srl' => 'expression'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'expression_0' # rule 'expression' => %w[pattern separator flags]
- reduce_expression_0(aProduction, aRange, theTokens, theChildren)
-
- when 'expression_1' # rule 'expression' => 'pattern'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'pattern_0' # rule 'pattern' => %w[pattern separator quantifiable]
- reduce_pattern_0(aProduction, aRange, theTokens, theChildren)
-
- when 'pattern_1' # rule 'pattern' => 'quantifiable'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'separator_0' # rule 'separator' => 'COMMA'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'separator_1' # rule 'separator' => []
- nil
-
- when 'flags_0' # rule 'flags' => %[flags separator single_flag]
- ### NEW
- reduce_flags_0(aProduction, aRange, theTokens, theChildren)
-
- when 'single_flag_0' # rule 'single_flag' => %w[CASE INSENSITIVE]
- ### NEW
- reduce_single_flag_0(aProduction, aRange, theTokens, theChildren)
-
- when 'single_flag_1' # rule 'single_flag' => %w[MULTI LINE]
- ### NEW
- reduce_single_flag_1(aProduction, aRange, theTokens, theChildren)
-
- when 'single_flag_2' # rule 'single_flag' => %w[ALL LAZY]
- ### NEW
- reduce_single_flag_2(aProduction, aRange, theTokens, theChildren)
-
- # rule 'quantifiable' => %w[begin_anchor anchorable end_anchor]
- when 'quantifiable_0'
- reduce_quantifiable_0(aProduction, aRange, theTokens, theChildren)
-
- # rule 'quantifiable' => %w[begin_anchor anchorable]
- when 'quantifiable_1'
- reduce_quantifiable_1(aProduction, aRange, theTokens, theChildren)
-
- # rule 'quantifiable' => %w[anchorable end_anchor]
- when 'quantifiable_2'
- reduce_quantifiable_2(aProduction, aRange, theTokens, theChildren)
-
- when 'quantifiable_3' # rule 'quantifiable' => 'anchorable'
- return_first_child(aRange, theTokens, theChildren)
-
- # rule 'begin_anchor' => %w[STARTS WITH]
- # rule 'begin_anchor' => %w[BEGIN WITH]
- when 'begin_anchor_0', 'begin_anchor_1'
- reduce_begin_anchor_0(aProduction, aRange, theTokens, theChildren)
-
- when 'end_anchor_0' # rule 'end_anchor' => %w[MUST END]
- reduce_end_anchor_0(aProduction, aRange, theTokens, theChildren)
-
- when 'anchorable_0' # rule 'anchorable' => 'assertable'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'anchorable_1' # rule 'anchorable' => %w[assertable assertion]
- reduce_anchorable_1(aProduction, aRange, theTokens, theChildren)
-
- when 'anchorable_1' # rule 'anchorable' => %w[assertable assertion]
- reduce_anchorable_1(aProduction, aRange, theTokens, theChildren)
-
- # rule 'assertion' => %w[IF FOLLOWED BY assertable]
- when 'assertion_0'
- reduce_assertion_0(aProduction, aRange, theTokens, theChildren)
-
- # rule 'assertion' => %w[IF NOT FOLLOWED BY assertable]
- when 'assertion_1'
- reduce_assertion_1(aProduction, aRange, theTokens, theChildren)
-
- # rule 'assertion' => %w[IF ALREADY HAD assertable]
- when 'assertion_2'
- reduce_assertion_2(aProduction, aRange, theTokens, theChildren)
-
- # rule 'assertion' => %w[IF NOT ALREADY HAD assertable]
- when 'assertion_3'
- reduce_assertion_3(aProduction, aRange, theTokens, theChildren)
-
- when 'assertable_0' # rule 'assertable' => 'term'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'assertable_1' # rule 'assertable' => %w[term quantifier]
- reduce_assertable_1(aProduction, aRange, theTokens, theChildren)
-
- when 'term_0' # rule 'term' => 'atom'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'term_1' # rule 'term' => 'alternation'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'term_2' # rule 'term' => 'grouping'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'term_3' # rule 'term' => 'capturing_group'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'atom_0' # rule 'atom' => 'letter_range'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'atom_1' # rule 'atom' => 'digit_range'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'atom_2' # rule 'atom' => 'character_class'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'atom_3' # rule 'atom' => 'special_char'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'atom_4' # rule 'atom' => 'literal'
- return_first_child(aRange, theTokens, theChildren)
-
- # rule 'letter_range' => %w[LETTER FROM LETTER_LIT TO LETTER_LIT]
- when 'letter_range_0'
- reduce_letter_range_0(aProduction, aRange, theTokens, theChildren)
-
- #rule 'letter_range' => %w[UPPERCASE LETTER FROM LETTER_LIT TO LETTER_LIT]
- when 'letter_range_1'
- reduce_letter_range_1(aProduction, aRange, theTokens, theChildren)
-
- when 'letter_range_2' # rule 'letter_range' => 'LETTER'
- reduce_letter_range_2(aProduction, aRange, theTokens, theChildren)
-
- when 'letter_range_3' # rule 'letter_range' => %w[UPPERCASE LETTER]
- reduce_letter_range_3(aProduction, aRange, theTokens, theChildren)
-
- # rule 'digit_range' => %w[digit_or_number FROM DIGIT_LIT TO DIGIT_LIT]
- when 'digit_range_0'
- reduce_digit_range_0(aProduction, aRange, theTokens, theChildren)
-
- when 'digit_range_1' # rule 'digit_range' => 'digit_or_number'
- reduce_digit_range_1(aProduction, aRange, theTokens, theChildren)
-
- when 'character_class_0' # rule 'character_class' => %w[ANY CHARACTER]
- reduce_character_class_0(aProduction, aRange, theTokens, theChildren)
-
- when 'character_class_1' # rule 'character_class' => %w[NO CHARACTER]
- reduce_character_class_1(aProduction, aRange, theTokens, theChildren)
-
- when 'character_class_2' # rule 'character_class' => 'WHITESPACE'
- reduce_character_class_2(aProduction, aRange, theTokens, theChildren)
-
- when 'character_class_3' # rule 'character_class' => %w[NO WHITESPACE]
- reduce_character_class_3(aProduction, aRange, theTokens, theChildren)
-
- when 'character_class_4' # rule 'character_class' => 'ANYTHING'
- reduce_character_class_4(aProduction, aRange, theTokens, theChildren)
-
- when 'character_class_5' # rule 'character_class' => %w[ONE OF STRING_LIT]
- reduce_character_class_5(aProduction, aRange, theTokens, theChildren)
-
- when 'special_char_0' # rule 'special_char' => 'TAB'
- reduce_special_char_0(aProduction, aRange, theTokens, theChildren)
-
- when 'special_char_1' # rule 'special_char' => 'BACKSLASH'
- reduce_special_char_1(aProduction, aRange, theTokens, theChildren)
-
- when 'special_char_2' # rule 'special_char' => %w[NEW LINE]
- reduce_special_char_2(aProduction, aRange, theTokens, theChildren)
-
- when 'literal_0' # rule 'literal' => %[LITERALLY STRING_LIT]
- reduce_literal_0(aProduction, aRange, theTokens, theChildren)
-
- # rule 'alternation' => %w[ANY OF LPAREN alternatives RPAREN]
- when 'alternation_0'
- reduce_alternation_0(aProduction, aRange, theTokens, theChildren)
-
- # rule 'alternatives' => %w[alternatives separator quantifiable]
- when 'alternatives_0'
- reduce_alternatives_0(aProduction, aRange, theTokens, theChildren)
-
- when 'alternatives_1' # rule 'alternatives' => 'quantifiable'
- reduce_alternatives_1(aProduction, aRange, theTokens, theChildren)
-
- when 'grouping_0' # rule 'grouping' => %w[LPAREN pattern RPAREN]
- reduce_grouping_0(aProduction, aRange, theTokens, theChildren)
-
- # rule 'capturing_group' => %w[CAPTURE assertable]
- when 'capturing_group_0'
- reduce_capturing_group_0(aProduction, aRange, theTokens, theChildren)
-
- # rule 'capturing_group' => %w[CAPTURE assertable UNTIL assertable]
- when 'capturing_group_1'
- reduce_capturing_group_1(aProduction, aRange, theTokens, theChildren)
-
- # rule 'capturing_group' => %w[CAPTURE assertable AS var_name]
- when 'capturing_group_2'
- reduce_capturing_group_2(aProduction, aRange, theTokens, theChildren)
-
- # rule 'capturing_group' => %w[CAPTURE assertable AS var_name UNTIL assertable]
- when 'capturing_group_3'
- reduce_capturing_group_3(aProduction, aRange, theTokens, theChildren)
-
- when 'var_name_0' # rule 'var_name' => 'STRING_LIT'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'quantifier_0' # rule 'quantifier' => 'ONCE'
- multiplicity(1, 1)
-
- when 'quantifier_1' # rule 'quantifier' => 'TWICE'
- multiplicity(2, 2)
-
- when 'quantifier_2' # rule 'quantifier' => %w[EXACTLY count TIMES]
- reduce_quantifier_2(aProduction, aRange, theTokens, theChildren)
-
- # rule 'quantifier' => %w[BETWEEN count AND count times_suffix]
- when 'quantifier_3'
- reduce_quantifier_3(aProduction, aRange, theTokens, theChildren)
-
- when 'quantifier_4' # rule 'quantifier' => 'OPTIONAL'
- multiplicity(0, 1)
-
- when 'quantifier_5' # rule 'quantifier' => %w[ONCE OR MORE]
- multiplicity(1, :more)
-
- when 'quantifier_6' # rule 'quantifier' => %w[NEVER OR MORE]
- multiplicity(0, :more)
-
- when 'quantifier_7' # rule 'quantifier' => %w[AT LEAST count TIMES]
- reduce_quantifier_7(aProduction, aRange, theTokens, theChildren)
-
- # rule 'digit_or_number' => 'DIGIT'
- # rule 'digit_or_number' => 'NUMER'
- when 'digit_or_number_0', 'digit_or_number_1'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'count_0', 'count_1'
- return_first_child(aRange, theTokens, theChildren)
-
- when 'times_suffix_0', 'times_suffix_1'
- nil
- else
- raise StandardError, "Don't know production #{aProduction.name}"
+ short_name = aProduction.name
+ method_name = 'reduce_' + short_name
+ if self.respond_to?(method_name, true)
+ node = send(method_name, aProduction, aRange, theTokens, theChildren)
+ else
+ # Default action...
+ node = case aProduction.rhs.size
+ when 0
+ nil
+ when 1
+ return_first_child(aRange, theTokens, theChildren)
+ else
+ raise StandardError, "Don't know production '#{aProduction.name}'"
+ end
  end
-
  return node
  end

@@ -330,259 +103,301 @@ class ASTBuilder < Rley::Parser::ParseTreeBuilder
  def repetition(expressionToRepeat, aMultiplicity)
  return Regex::Repetition.new(expressionToRepeat, aMultiplicity)
  end
-
- # rule 'expression' => %w[pattern separator flags]
- def reduce_expression_0(aProduction, aRange, theTokens, theChildren)
+
+ def begin_anchor
+ return Regex::Anchor.new('^')
+ end
+
+ # rule('expression' => %w[pattern separator flags]).as 'flagged_expr'
+ def reduce_flagged_expr(aProduction, aRange, theTokens, theChildren)
  @options = theChildren[2] if theChildren[2]
  return_first_child(aRange, theTokens, theChildren)
  end

- # rule 'pattern' => %w[pattern separator quantifiable]
- def reduce_pattern_0(aProduction, aRange, theTokens, theChildren)
+ # rule('pattern' => %w[pattern separator quantifiable]).as 'pattern_sequence'
+ def reduce_pattern_sequence(aProduction, aRange, theTokens, theChildren)
  return Regex::Concatenation.new(theChildren[0], theChildren[2])
  end

- # rule 'flags' => %[flags separator single_flag]
- def reduce_flags_0(aProduction, aRange, theTokens, theChildren)
+ # rule('flags' => %[flags separator single_flag]).as 'flag_sequence'
+ def reduce_flag_sequence(aProduction, aRange, theTokens, theChildren)
  theChildren[0] << theChildren[2]
  end

- # rule 'single_flag' => %w[CASE INSENSITIVE]
- def reduce_single_flag_0(aProduction, aRange, theTokens, theChildren)
+ # rule('single_flag' => %w[CASE INSENSITIVE]).as 'case_insensitive'
+ def reduce_case_insensitive(aProduction, aRange, theTokens, theChildren)
  return [ Regex::MatchOption.new(:IGNORECASE, true) ]
  end

- # rule 'single_flag' => %w[MULTI LINE]
- def reduce_single_flag_1(aProduction, aRange, theTokens, theChildren)
+ # rule('single_flag' => %w[MULTI LINE]).as 'multi_line'
+ def reduce_multi_line(aProduction, aRange, theTokens, theChildren)
  return [ Regex::MatchOption.new(:MULTILINE, true) ]
  end

- # rule 'single_flag' => %w[ALL LAZY]
- def reduce_single_flag_2(aProduction, aRange, theTokens, theChildren)
+ # rule('single_flag' => %w[ALL LAZY]).as 'all_lazy'
+ def reduce_all_lazy(aProduction, aRange, theTokens, theChildren)
  return [ Regex::MatchOption.new(:ALL_LAZY, true) ]
  end

  # rule 'quantifiable' => %w[begin_anchor anchorable end_anchor]
- def reduce_quantifiable_0(aProduction, aRange, theTokens, theChildren)
+ def reduce_pinned_quantifiable(aProduction, aRange, theTokens, theChildren)
  theChildren[1].begin_anchor = theChildren[0]
  theChildren[1].end_anchor = theChildren[2]
  return theChildren[1]
  end

  # rule 'quantifiable' => %w[begin_anchor anchorable]
- def reduce_quantifiable_1(aProduction, aRange, theTokens, theChildren)
+ def reduce_begin_anchor_quantifiable(aProduction, aRange, theTokens, theChildren)
  theChildren[1].begin_anchor = theChildren[0]
  return theChildren[1]
  end

  # rule 'quantifiable' => %w[anchorable end_anchor]
- def reduce_quantifiable_2(aProduction, aRange, theTokens, theChildren)
+ def reduce_end_anchor_quantifiable(aProduction, aRange, theTokens, theChildren)
  theChildren[0].end_anchor = theChildren[1]
  return theChildren[0]
  end

  # rule 'begin_anchor' => %w[STARTS WITH]
- # rule 'begin_anchor' => %w[BEGIN WITH]
- def reduce_begin_anchor_0(aProduction, aRange, theTokens, theChildren)
- return Regex::Anchor.new('^')
+ def reduce_starts_with(aProduction, aRange, theTokens, theChildren)
+ begin_anchor
  end

- # rule 'end_anchor' => %w[MUST END]
- def reduce_end_anchor_0(aProduction, aRange, theTokens, theChildren)
+ # rule 'begin_anchor' => %w[BEGIN WITH]
+ def reduce_begin_with(aProduction, aRange, theTokens, theChildren)
+ begin_anchor
+ end
+
+ # rule 'end_anchor' => %w[MUST END].as 'end_anchor'
+ def reduce_end_anchor(aProduction, aRange, theTokens, theChildren)
  return Regex::Anchor.new('$')
  end

-
- # rule 'anchorable' => %w[assertable assertion]
- def reduce_anchorable_1(aProduction, aRange, theTokens, theChildren)
+ # rule('anchorable' => %w[assertable assertion]).as 'asserted_anchorable'
+ def reduce_asserted_anchorable(aProduction, aRange, theTokens, theChildren)
  assertion = theChildren.last
  assertion.children.unshift(theChildren[0])
  return assertion
  end

- # rule 'assertion' => %w[IF FOLLOWED BY assertable]
- def reduce_assertion_0(aProduction, aRange, theTokens, theChildren)
+ # rule('assertion' => %w[IF FOLLOWED BY assertable]).as 'if_followed'
+ def reduce_if_followed(aProduction, aRange, theTokens, theChildren)
  return Regex::Lookaround.new(theChildren.last, :ahead, :positive)
  end

- # rule 'assertion' => %w[IF NOT FOLLOWED BY assertable]
- def reduce_assertion_1(aProduction, aRange, theTokens, theChildren)
+ # rule('assertion' => %w[IF NOT FOLLOWED BY assertable]).as 'if_not_followed'
+ def reduce_if_not_followed(aProduction, aRange, theTokens, theChildren)
  return Regex::Lookaround.new(theChildren.last, :ahead, :negative)
  end

- # rule 'assertion' => %w[IF ALREADY HAD assertable]
- def reduce_assertion_2(aProduction, aRange, theTokens, theChildren)
+ # rule('assertion' => %w[IF ALREADY HAD assertable]).as 'if_had'
+ def reduce_if_had(aProduction, aRange, theTokens, theChildren)
  return Regex::Lookaround.new(theChildren.last, :behind, :positive)
  end

- # rule 'assertion' => %w[IF NOT ALREADY HAD assertable]
- def reduce_assertion_3(aProduction, aRange, theTokens, theChildren)
+ # rule('assertion' => %w[IF NOT ALREADY HAD assertable]).as 'if_not_had'
+ def reduce_if_not_had(aProduction, aRange, theTokens, theChildren)
  return Regex::Lookaround.new(theChildren.last, :behind, :negative)
  end

- # rule 'anchorable' => %w[term quantifier]
- def reduce_assertable_1(aProduction, aRange, theTokens, theChildren)
+ # rule('assertable' => %w[term quantifier]).as 'quantified_assertable'
+ def reduce_quantified_assertable(aProduction, aRange, theTokens, theChildren)
  quantifier = theChildren[1]
  term = theChildren[0]
  repetition(term, quantifier)
  end

- # rule 'letter_range' => %w[LETTER FROM LETTER_LIT TO LETTER_LIT]
- def reduce_letter_range_0(aProduction, aRange, theTokens, theChildren)
+ # rule('letter_range' => %w[LETTER FROM LETTER_LIT TO LETTER_LIT]).as 'lowercase_from_to'
+ def reduce_lowercase_from_to(aProduction, aRange, theTokens, theChildren)
  lower = theChildren[2].token.lexeme
  upper = theChildren[4].token.lexeme
  ch_range = char_range(lower, upper)
  char_class(false, ch_range)
  end

- # rule 'letter_range' => %w[UPPERCASE LETTER FROM LETTER_LIT TO LETTER_LIT]
- def reduce_letter_range_1(aProduction, aRange, theTokens, theChildren)
+ # rule('letter_range' => %w[UPPERCASE LETTER FROM LETTER_LIT TO LETTER_LIT]).as 'uppercase_from_to'
+ def reduce_uppercase_from_to(aProduction, aRange, theTokens, theChildren)
  lower = theChildren[3].token.lexeme
  upper = theChildren[5].token.lexeme
  ch_range = char_range(lower.upcase, upper.upcase)
  char_class(false, ch_range)
  end

- # rule 'letter_range' => 'LETTER'
- def reduce_letter_range_2(aProduction, aRange, theTokens, theChildren)
+ # rule('letter_range' => 'LETTER').as 'any_lowercase'
+ def reduce_any_lowercase(aProduction, aRange, theTokens, theChildren)
  ch_range = char_range('a', 'z')
  char_class(false, ch_range)
  end

- #rule 'letter_range' => %w[UPPERCASE LETTER]
- def reduce_letter_range_3(aProduction, aRange, theTokens, theChildren)
+ # rule('letter_range' => %w[UPPERCASE LETTER]).as 'any_uppercase'
+ def reduce_any_uppercase(aProduction, aRange, theTokens, theChildren)
  ch_range = char_range('A', 'Z')
  char_class(false, ch_range)
  end

- # rule 'digit_range' => %w[digit_or_number FROM DIGIT_LIT TO DIGIT_LIT]
- def reduce_digit_range_0(aProduction, aRange, theTokens, theChildren)
- reduce_letter_range_0(aProduction, aRange, theTokens, theChildren)
+ # rule('digit_range' => %w[digit_or_number FROM DIGIT_LIT TO DIGIT_LIT]).as 'digits_from_to'
+ def reduce_digits_from_to(aProduction, aRange, theTokens, theChildren)
+ reduce_lowercase_from_to(aProduction, aRange, theTokens, theChildren)
  end

- # rule 'digit_range' => 'digit_or_number'
- def reduce_digit_range_1(aProduction, aRange, theTokens, theChildren)
+ # rule('digit_range' => 'digit_or_number').as 'simple_digit_range'
+ def reduce_simple_digit_range(aProduction, aRange, theTokens, theChildren)
  char_shorthand('d')
  end

- # rule 'character_class' => %w[ANY CHARACTER]
- def reduce_character_class_0(aProduction, aRange, theTokens, theChildren)
+ # rule('character_class' => %w[ANY CHARACTER]).as 'any_character'
+ def reduce_any_character(aProduction, aRange, theTokens, theChildren)
  char_shorthand('w')
  end

- # rule 'character_class' => %w[NO CHARACTER]
- def reduce_character_class_1(aProduction, aRange, theTokens, theChildren)
+ # rule('character_class' => %w[NO CHARACTER]).as 'no_character'
+ def reduce_no_character(aProduction, aRange, theTokens, theChildren)
  char_shorthand('W')
  end

- # rule 'character_class' => 'WHITESPACE'
- def reduce_character_class_2(aProduction, aRange, theTokens, theChildren)
+ # rule('character_class' => 'WHITESPACE').as 'whitespace'
+ def reduce_whitespace(aProduction, aRange, theTokens, theChildren)
  char_shorthand('s')
  end

- # rule 'character_class' => %w[NO WHITESPACE]
- def reduce_character_class_3(aProduction, aRange, theTokens, theChildren)
+ # rule('character_class' => %w[NO WHITESPACE]).as 'no_whitespace'
+ def reduce_no_whitespace(aProduction, aRange, theTokens, theChildren)
  char_shorthand('S')
  end

- # rule 'character_class' => 'ANYTHING'
- def reduce_character_class_4(aProduction, aRange, theTokens, theChildren)
+ # rule('character_class' => 'ANYTHING').as 'anything'
+ def reduce_anything(aProduction, aRange, theTokens, theChildren)
  wildcard
  end

- # rule 'character_class' => %w[ONE OF STRING_LIT]
- def reduce_character_class_5(aProduction, aRange, theTokens, theChildren)
+ # rule('alternation' => %w[ANY OF LPAREN alternatives RPAREN]).as 'any_of'
+ def reduce_one_of(aProduction, aRange, theTokens, theChildren)
  raw_literal = theChildren[-1].token.lexeme.dup
  alternatives = raw_literal.chars.map { |ch| Regex::Character.new(ch) }
  return Regex::CharClass.new(false, *alternatives) # TODO check other implementations
  end

- # rule 'special_char' => 'TAB'
- def reduce_special_char_0(aProduction, aRange, theTokens, theChildren)
+ # rule('special_char' => 'TAB').as 'tab'
+ def reduce_tab(aProduction, aRange, theTokens, theChildren)
  Regex::Character.new('\t')
  end

- # rule 'special_char' => 'BACKSLASH'
- def reduce_special_char_1(aProduction, aRange, theTokens, theChildren)
+ # rule('special_char' => 'BACKSLASH').as 'backslash'
+ def reduce_backslash(aProduction, aRange, theTokens, theChildren)
  Regex::Character.new('\\')
  end

- # rule 'special_char' => %w[NEW LINE]
- def reduce_special_char_2(aProduction, aRange, theTokens, theChildren)
+ # rule('special_char' => %w[NEW LINE]).as 'new_line'
+ def reduce_new_line(aProduction, aRange, theTokens, theChildren)
  # TODO: control portability
  Regex::Character.new('\n')
  end

- # rule 'literal' => %[LITERALLY STRING_LIT]
- def reduce_literal_0(aProduction, aRange, theTokens, theChildren)
+ # rule('literal' => %w[LITERALLY STRING_LIT]).as 'literally'
+ def reduce_literally(aProduction, aRange, theTokens, theChildren)
  # What if literal is empty?...

  raw_literal = theChildren[-1].token.lexeme.dup
  return string_literal(raw_literal)
  end

- # rule 'alternation' => %w[ANY OF LPAREN alternatives RPAREN]
- def reduce_alternation_0(aProduction, aRange, theTokens, theChildren)
+ #rule('alternation' => %w[ANY OF LPAREN alternatives RPAREN]).as 'any_of'
+ def reduce_any_of(aProduction, aRange, theTokens, theChildren)
  return Regex::Alternation.new(*theChildren[3])
  end

- # rule 'alternatives' => %w[alternatives separator quantifiable]
- def reduce_alternatives_0(aProduction, aRange, theTokens, theChildren)
+ # rule('alternatives' => %w[alternatives separator quantifiable]).as 'alternative_list'
+ def reduce_alternative_list(aProduction, aRange, theTokens, theChildren)
  return theChildren[0] << theChildren[-1]
  end

- # rule 'alternatives' => 'quantifiable'
- def reduce_alternatives_1(aProduction, aRange, theTokens, theChildren)
+ # rule('alternatives' => 'quantifiable').as 'simple_alternative'
+ def reduce_simple_alternative(aProduction, aRange, theTokens, theChildren)
  return [theChildren.last]
  end

- # rule 'grouping' => %w[LPAREN pattern RPAREN]
- def reduce_grouping_0(aProduction, aRange, theTokens, theChildren)
+ # rule('grouping' => %w[LPAREN pattern RPAREN]).as 'grouping_parenthenses'
+ def reduce_grouping_parenthenses(aProduction, aRange, theTokens, theChildren)
  return Regex::NonCapturingGroup.new(theChildren[1])
  end
-
- # rule 'capturing_group' => %w[CAPTURE assertable]
- def reduce_capturing_group_0(aProduction, aRange, theTokens, theChildren)
+
+ # rule('capturing_group' => %w[CAPTURE assertable]).as 'capture'
+ def reduce_capture(aProduction, aRange, theTokens, theChildren)
  return Regex::CapturingGroup.new(theChildren[1])
  end

- # rule 'capturing_group' => %w[CAPTURE assertable UNTIL assertable]
- def reduce_capturing_group_1(aProduction, aRange, theTokens, theChildren)
+ # rule('capturing_group' => %w[CAPTURE assertable UNTIL assertable]).as 'capture_until'
+ def reduce_capture_until(aProduction, aRange, theTokens, theChildren)
  group = Regex::CapturingGroup.new(theChildren[1])
  return Regex::Concatenation.new(group, theChildren[3])
  end

- # rule 'capturing_group' => %w[CAPTURE assertable AS var_name]
- def reduce_capturing_group_2(aProduction, aRange, theTokens, theChildren)
+ # rule('capturing_group' => %w[CAPTURE assertable AS var_name]).as 'named_capture'
+ def reduce_named_capture(aProduction, aRange, theTokens, theChildren)
  name = theChildren[3].token.lexeme.dup
  return Regex::CapturingGroup.new(theChildren[1], name)
  end

- # rule 'capturing_group' => %w[CAPTURE assertable AS var_name UNTIL assertable]
- def reduce_capturing_group_3(aProduction, aRange, theTokens, theChildren)
+ # rule('capturing_group' => %w[CAPTURE assertable AS var_name UNTIL assertable]).as 'named_capture_until'
+ def reduce_named_capture_until(aProduction, aRange, theTokens, theChildren)
  name = theChildren[3].token.lexeme.dup
  group = Regex::CapturingGroup.new(theChildren[1], name)
- return Regex::Concatenation.new(group, theChildren[5])
+ return Regex::Concatenation.new(group, theChildren[5])
  end
+
+ # rule('quantifier' => 'ONCE').as 'once'
+ def reduce_once(aProduction, aRange, theTokens, theChildren)
+ multiplicity(1, 1)
+ end
+
+ # rule('quantifier' => 'TWICE').as 'twice'
+ def reduce_twice(aProduction, aRange, theTokens, theChildren)
+ multiplicity(2, 2)
+ end

- # rule 'quantifier' => %w[EXACTLY count TIMES]
- def reduce_quantifier_2(aProduction, aRange, theTokens, theChildren)
+ # rule('quantifier' => %w[EXACTLY count TIMES]).as 'exactly'
+ def reduce_exactly(aProduction, aRange, theTokens, theChildren)
  count = theChildren[1].token.lexeme.to_i
  multiplicity(count, count)
  end

- # rule 'quantifier' => %w[BETWEEN count AND count times_suffix]
- def reduce_quantifier_3(aProduction, aRange, theTokens, theChildren)
+ # rule('quantifier' => %w[BETWEEN count AND count times_suffix]).as 'between_and'
+ def reduce_between_and(aProduction, aRange, theTokens, theChildren)
  lower = theChildren[1].token.lexeme.to_i
  upper = theChildren[3].token.lexeme.to_i
  multiplicity(lower, upper)
  end
+
+ # rule('quantifier' => 'OPTIONAL').as 'optional'
+ def reduce_optional(aProduction, aRange, theTokens, theChildren)
+ multiplicity(0, 1)
+ end

- # rule 'quantifier' => %w[AT LEAST count TIMES]
- def reduce_quantifier_7(aProduction, aRange, theTokens, theChildren)
+ # rule('quantifier' => %w[ONCE OR MORE]).as 'once_or_more'
+ def reduce_once_or_more(aProduction, aRange, theTokens, theChildren)
+ multiplicity(1, :more)
+ end
+
+ # rule('quantifier' => %w[NEVER OR MORE]).as 'never_or_more'
+ def reduce_never_or_more(aProduction, aRange, theTokens, theChildren)
+ multiplicity(0, :more)
+ end
+
+ # rule('quantifier' => %w[AT LEAST count TIMES]).as 'at_least'
+ def reduce_at_least(aProduction, aRange, theTokens, theChildren)
  count = theChildren[2].token.lexeme.to_i
  multiplicity(count, :more)
+ end
+
+ # rule('times_suffix' => 'TIMES').as 'times_keyword'
+ def reduce_times_keyword(aProduction, aRange, theTokens, theChildren)
+ return nil
  end
+
+ # rule('times_suffix' => []).as 'times_dropped'
+ def reduce_times_dropped(aProduction, aRange, theTokens, theChildren)
+ return nil
+ end

  end # class
  # End of file
@@ -25,80 +25,80 @@ module SRL
  add_terminals('CASE', 'INSENSITIVE', 'MULTI', 'ALL')
  add_terminals('LAZY')

- rule 'srl' => 'expression'
- rule 'expression' => %w[pattern separator flags]
- rule 'expression' => 'pattern'
- rule 'pattern' => %w[pattern separator quantifiable]
- rule 'pattern' => 'quantifiable'
- rule 'separator' => 'COMMA'
- rule 'separator' => []
- rule 'flags' => %[flags separator single_flag]
- rule 'single_flag' => %w[CASE INSENSITIVE]
- rule 'single_flag' => %w[MULTI LINE]
- rule 'single_flag' => %w[ALL LAZY]
- rule 'quantifiable' => %w[begin_anchor anchorable end_anchor]
- rule 'quantifiable' => %w[begin_anchor anchorable]
- rule 'quantifiable' => %w[anchorable end_anchor]
- rule 'quantifiable' => 'anchorable'
- rule 'begin_anchor' => %w[STARTS WITH]
- rule 'begin_anchor' => %w[BEGIN WITH]
- rule 'end_anchor' => %w[MUST END]
- rule 'anchorable' => 'assertable'
- rule 'anchorable' => %w[assertable assertion]
- rule 'assertion' => %w[IF FOLLOWED BY assertable]
- rule 'assertion' => %w[IF NOT FOLLOWED BY assertable]
- rule 'assertion' => %w[IF ALREADY HAD assertable]
- rule 'assertion' => %w[IF NOT ALREADY HAD assertable]
- rule 'assertable' => 'term'
- rule 'assertable' => %w[term quantifier]
- rule 'term' => 'atom'
- rule 'term' => 'alternation'
- rule 'term' => 'grouping'
- rule 'term' => 'capturing_group'
- rule 'atom' => 'letter_range'
- rule 'atom' => 'digit_range'
- rule 'atom' => 'character_class'
- rule 'atom' => 'special_char'
- rule 'atom' => 'literal'
- rule 'letter_range' => %w[LETTER FROM LETTER_LIT TO LETTER_LIT]
- rule 'letter_range' => %w[UPPERCASE LETTER FROM LETTER_LIT TO LETTER_LIT]
- rule 'letter_range' => 'LETTER'
- rule 'letter_range' => %w[UPPERCASE LETTER]
- rule 'digit_range' => %w[digit_or_number FROM DIGIT_LIT TO DIGIT_LIT]
- rule 'digit_range' => 'digit_or_number'
- rule 'character_class' => %w[ANY CHARACTER]
- rule 'character_class' => %w[NO CHARACTER]
- rule 'character_class' => 'WHITESPACE'
- rule 'character_class' => %w[NO WHITESPACE]
- rule 'character_class' => 'ANYTHING'
- rule 'character_class' => %w[ONE OF STRING_LIT]
- rule 'special_char' => 'TAB'
- rule 'special_char' => 'BACKSLASH'
- rule 'special_char' => %w[NEW LINE]
- rule 'literal' => %w[LITERALLY STRING_LIT]
- rule 'alternation' => %w[ANY OF LPAREN alternatives RPAREN]
- rule 'alternatives' => %w[alternatives separator quantifiable]
- rule 'alternatives' => 'quantifiable'
- rule 'grouping' => %w[LPAREN pattern RPAREN]
- rule 'capturing_group' => %w[CAPTURE assertable]
- rule 'capturing_group' => %w[CAPTURE assertable UNTIL assertable]
- rule 'capturing_group' => %w[CAPTURE assertable AS var_name]
- rule 'capturing_group' => %w[CAPTURE assertable AS var_name UNTIL assertable]
- rule 'var_name' => 'STRING_LIT'
- rule 'quantifier' => 'ONCE'
- rule 'quantifier' => 'TWICE'
- rule 'quantifier' => %w[EXACTLY count TIMES]
- rule 'quantifier' => %w[BETWEEN count AND count times_suffix]
- rule 'quantifier' => 'OPTIONAL'
- rule 'quantifier' => %w[ONCE OR MORE]
- rule 'quantifier' => %w[NEVER OR MORE]
- rule 'quantifier' => %w[AT LEAST count TIMES]
- rule 'digit_or_number' => 'DIGIT'
- rule 'digit_or_number' => 'NUMBER'
- rule 'count' => 'DIGIT_LIT'
- rule 'count' => 'INTEGER'
- rule 'times_suffix' => 'TIMES'
- rule 'times_suffix' => []
+ rule('srl' => 'expression').as 'start_rule'
+ rule('expression' => %w[pattern separator flags]).as 'flagged_expr'
+ rule('expression' => 'pattern').as 'simple_expr'
+ rule('pattern' => %w[pattern separator quantifiable]).as 'pattern_sequence'
+ rule('pattern' => 'quantifiable').as 'basic_pattern'
+ rule('separator' => 'COMMA').as 'comma_separator'
+ rule('separator' => []).as 'void_separator'
+ rule('flags' => %[flags separator single_flag]).as 'flag_sequence'
+ rule('single_flag' => %w[CASE INSENSITIVE]).as 'case_insensitive'
+ rule('single_flag' => %w[MULTI LINE]).as 'multi_line'
+ rule('single_flag' => %w[ALL LAZY]).as 'all_lazy'
+ rule('quantifiable' => %w[begin_anchor anchorable end_anchor]).as 'pinned_quantifiable'
+ rule('quantifiable' => %w[begin_anchor anchorable]).as 'begin_anchor_quantifiable'
+ rule('quantifiable' => %w[anchorable end_anchor]).as 'end_anchor_quantifiable'
+ rule('quantifiable' => 'anchorable').as 'simple_quantifiable'
+ rule('begin_anchor' => %w[STARTS WITH]).as 'starts_with'
+ rule('begin_anchor' => %w[BEGIN WITH]).as 'begin_with'
+ rule('end_anchor' => %w[MUST END]).as 'end_anchor'
+ rule('anchorable' => 'assertable').as 'simple_anchorable'
+ rule('anchorable' => %w[assertable assertion]).as 'asserted_anchorable'
+ rule('assertion' => %w[IF FOLLOWED BY assertable]).as 'if_followed'
+ rule('assertion' => %w[IF NOT FOLLOWED BY assertable]).as 'if_not_followed'
+ rule('assertion' => %w[IF ALREADY HAD assertable]).as 'if_had'
+ rule('assertion' => %w[IF NOT ALREADY HAD assertable]).as 'if_not_had'
+ rule('assertable' => 'term').as 'simple_assertable'
+ rule('assertable' => %w[term quantifier]).as 'quantified_assertable'
+ rule('term' => 'atom').as 'atom_term'
+ rule('term' => 'alternation').as 'alternation_term'
+ rule('term' => 'grouping').as 'grouping_term'
+ rule('term' => 'capturing_group').as 'capturing_group_atom'
+ rule('atom' => 'letter_range').as 'letter_range_atom'
+ rule('atom' => 'digit_range').as 'digit_range_atom'
+ rule('atom' => 'character_class').as 'character_class_atom'
+ rule('atom' => 'special_char').as 'special_char_atom'
+ rule('atom' => 'literal').as 'literal_atom'
+ rule('letter_range' => %w[LETTER FROM LETTER_LIT TO LETTER_LIT]).as 'lowercase_from_to'
+ rule('letter_range' => %w[UPPERCASE LETTER FROM LETTER_LIT TO LETTER_LIT]).as 'uppercase_from_to'
+ rule('letter_range' => 'LETTER').as 'any_lowercase'
+ rule('letter_range' => %w[UPPERCASE LETTER]).as 'any_uppercase'
+ rule('digit_range' => %w[digit_or_number FROM DIGIT_LIT TO DIGIT_LIT]).as 'digits_from_to'
+ rule('digit_range' => 'digit_or_number').as 'simple_digit_range'
+ rule('character_class' => %w[ANY CHARACTER]).as 'any_character'
+ rule('character_class' => %w[NO CHARACTER]).as 'no_character'
+ rule('character_class' => 'WHITESPACE').as 'whitespace'
+ rule('character_class' => %w[NO WHITESPACE]).as 'no_whitespace'
+ rule('character_class' => 'ANYTHING').as 'anything'
+ rule('character_class' => %w[ONE OF STRING_LIT]).as 'one_of'
+ rule('special_char' => 'TAB').as 'tab'
+ rule('special_char' => 'BACKSLASH').as 'backslash'
+ rule('special_char' => %w[NEW LINE]).as 'new_line'
+ rule('literal' => %w[LITERALLY STRING_LIT]).as 'literally'
+ rule('alternation' => %w[ANY OF LPAREN alternatives RPAREN]).as 'any_of'
+ rule('alternatives' => %w[alternatives separator quantifiable]).as 'alternative_list'
+ rule('alternatives' => 'quantifiable').as 'simple_alternative'
+ rule('grouping' => %w[LPAREN pattern RPAREN]).as 'grouping_parenthenses'
+ rule('capturing_group' => %w[CAPTURE assertable]).as 'capture'
+ rule('capturing_group' => %w[CAPTURE assertable UNTIL assertable]).as 'capture_until'
+ rule('capturing_group' => %w[CAPTURE assertable AS var_name]).as 'named_capture'
+ rule('capturing_group' => %w[CAPTURE assertable AS var_name UNTIL assertable]).as 'named_capture_until'
+ rule('var_name' => 'STRING_LIT').as 'var_name'
+ rule('quantifier' => 'ONCE').as 'once'
+ rule('quantifier' => 'TWICE').as 'twice'
+ rule('quantifier' => %w[EXACTLY count TIMES]).as 'exactly'
+ rule('quantifier' => %w[BETWEEN count AND count times_suffix]).as 'between_and'
+ rule('quantifier' => 'OPTIONAL').as 'optional'
+ rule('quantifier' => %w[ONCE OR MORE]).as 'once_or_more'
+ rule('quantifier' => %w[NEVER OR MORE]).as 'never_or_more'
+ rule('quantifier' => %w[AT LEAST count TIMES]).as 'at_least'
+ rule('digit_or_number' => 'DIGIT').as 'digit_keyword'
+ rule('digit_or_number' => 'NUMBER').as 'number_keyword'
+ rule('count' => 'DIGIT_LIT').as 'single_digit'
+ rule('count' => 'INTEGER').as 'integer_count'
+ rule('times_suffix' => 'TIMES').as 'times_keyword'
+ rule('times_suffix' => []).as 'times_dropped'
  end

  # And now build the grammar and make it accessible via a global constant
@@ -380,8 +380,7 @@ ENDS

  regexp = regexp_repr(result)
  expect(regexp.to_str).to eq('(?<foo>.+)m')
- end
-
+ end
  end # context

  context 'Parsing anchors:' do
@@ -3,7 +3,7 @@

  module Rley # Module used as a namespace
  # The version number of the gem.
- Version = '0.5.12'.freeze
+ Version = '0.5.13'.freeze

  # Brief description of the gem.
  Description = "Ruby implementation of the Earley's parsing algorithm".freeze
@@ -85,7 +85,11 @@ module Rley # This module is used as a namespace
  suffix = '_0'
  else
  prev_serial = previous.name.match(/_(\d+)$/)
- suffix = "_#{prev_serial[1].to_i + 1}"
+ if prev_serial
+ suffix = "_#{prev_serial[1].to_i + 1}"
+ else
+ suffix = '_0'
+ end
  end

  aProduction.name = prefix + suffix
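The guard added above matters once productions can carry custom names: by default a production name is a prefix plus a numeric suffix ('_0', '_1', ...), and the next suffix used to be computed by parsing the previous production's name with /_(\d+)$/. When the previous name was set through .as (e.g. 'flagged_expr'), that match returns nil and indexing it would raise, so the new code falls back to '_0'. A small illustrative Ruby helper mirroring that suffix logic (not the gem's actual method):

def default_suffix(previous_name)
  return '_0' if previous_name.nil?

  # Reuse the serial number when the previous name ends in _<digits>.
  prev_serial = previous_name.match(/_(\d+)$/)
  prev_serial ? "_#{prev_serial[1].to_i + 1}" : '_0'
end

default_suffix(nil)            # => "_0"
default_suffix('pattern_1')    # => "_2"
default_suffix('flagged_expr') # => "_0"  (custom name, no numeric suffix)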
@@ -16,7 +16,7 @@ module Rley # This module is used as a namespace
  # @return [NonTerminal] The left-hand side of the rule.
  attr_reader(:lhs)

- # @return [String] The unique name of the production rule.
+ # @return [String] The name of the production rule. It must be unique in a grammar.
  attr_accessor(:name)

  # @return [Boolean] A production is generative when all of its
@@ -62,6 +62,12 @@ module Rley # This module is used as a namespace
  result << " @generative=#{@generative}>"
  return result
  end
+
+ # A setter for the production name
+ # @param aName [String] the name of the production
+ def as(aName)
+ @name = aName
+ end

  private

@@ -54,12 +54,18 @@ module Rley # Open this namespace to avoid module qualifier prefixes
  end # context

  context 'Provided services:' do
- it 'should accept a name' do
+ it 'should accept a name (i)' do
  a_name = 'nominem'
  subject.name = a_name
  expect(subject.name).to eq(a_name)
  end

+ it 'should accept a name (ii)' do
+ a_name = 'nominem'
+ subject.as(a_name)
+ expect(subject.name).to eq(a_name)
+ end
+
  it 'should provide human-readable representation of itself' do
  subject.name = 'some name'
  prefix = /^#<Rley::Syntax::Production:\d+ @name="some name"/
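For reference, the new Production#as setter tested above is what the grammar DSL chains onto: the rule(...) helper is assumed to return the freshly added Production, so appending .as 'some_name' replaces the generated '_N' name with one the AST builder can dispatch on. A hypothetical usage sketch, assuming such a rule helper:

prod = rule('expression' => 'pattern')  # helper assumed to return the new Production
prod.as 'simple_expr'                   # same effect as rule(...).as 'simple_expr'
prod.name                               # => "simple_expr"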
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: rley
  version: !ruby/object:Gem::Version
- version: 0.5.12
+ version: 0.5.13
  platform: ruby
  authors:
  - Dimitri Geshef
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-02-03 00:00:00.000000000 Z
+ date: 2018-02-10 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: coveralls