ruby_parser 3.13.0 → 3.13.1

@@ -8,7 +8,7 @@ macro

  IDENT /^#{IDENT_CHAR}+/o

- ESC /\\((?>[0-7]{1,3}|x[0-9a-fA-F]{1,2}|M-[^\\]|(C-|c)[^\\]|u[0-9a-fA-F]+|u\{[0-9a-fA-F]+\}|[^0-7xMCc]))/
+ ESC /\\((?>[0-7]{1,3}|x[0-9a-fA-F]{1,2}|M-[^\\]|(C-|c)[^\\]|u[0-9a-fA-F]{1,4}|u\{[0-9a-fA-F]+\}|[^0-7xMCc]))/

  SIMPLE_STRING /((#{ESC}|\#(#{ESC}|[^\{\#\@\$\"\\])|[^\"\\\#])*)/o
  SSTRING /((\\.|[^\'])*)/
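
In the macro section, the ESC pattern for string escapes now caps a bare \u escape at four hex digits; the braced \u{...} form is unchanged. A quick illustration of the new behavior (the sample strings are made up; the ESC pattern itself is copied from the hunk above):

    ESC = /\\((?>[0-7]{1,3}|x[0-9a-fA-F]{1,2}|M-[^\\]|(C-|c)[^\\]|u[0-9a-fA-F]{1,4}|u\{[0-9a-fA-F]+\}|[^0-7xMCc]))/

    '\u12345'[ESC]    # => "\\u1234"    -- only four hex digits are consumed now
    '\u{12345}'[ESC]  # => "\\u{12345}" -- the braced form still accepts any length
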

@@ -44,18 +44,17 @@ rule

  /[\]\)\}]/ process_brace_close

  : /\!/
- | in_arg_state? /\!\@/ { result :expr_arg, :tUBANG, "!@" }
+ | is_after_operator? /\!\@/ { result EXPR_ARG, :tUBANG, "!@" }
  | /\![=~]?/ { result :arg_state, TOKENS[text], text }

  : /\./
- | /\.\.\.?/ { result :expr_beg, TOKENS[text], text }
+ | /\.\.\.?/ { result EXPR_BEG, TOKENS[text], text }
  | /\.\d/ { rb_compile_error "no .<digit> floating literal anymore put 0 before dot" }
- | /\./ { self.lex_state = :expr_beg; result :expr_dot, :tDOT, "." }
+ | /\./ { self.lex_state = EXPR_BEG; result EXPR_DOT, :tDOT, "." }

  /\(/ process_paren

- # TODO: :expr_beg|:expr_label
- /\,/ { result :expr_beg, TOKENS[text], text }
+ /\,/ { result EXPR_PAR, TOKENS[text], text }

  : /=/
  | /\=\=\=|\=\=|\=~|\=>|\=(?!begin\b)/ { result arg_state, TOKENS[text], text }
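
These rule hunks are part of a sweep that runs through the whole release: lexer state is no longer passed around as bare symbols (:expr_beg, :expr_arg, ...) but as the EXPR_* values, the in_arg_state? guard is renamed to is_after_operator?, and the removed TODO about :expr_beg|:expr_label is answered by EXPR_PAR. Unlike symbols, the new values can be combined and tested bitwise; a rough illustration with made-up numbers (the real values come from RubyLexer::State::Values, included at the end of this diff):

    :expr_beg.respond_to?(:|)           # => false -- symbols cannot be OR-ed together

    EXPR_BEG   = 0b01                   # hypothetical stand-in values
    EXPR_LABEL = 0b10
    EXPR_PAR   = EXPR_BEG | EXPR_LABEL  # the combination the old TODO asked for
    (EXPR_PAR & EXPR_LABEL).nonzero?    # => 2 -- membership is a simple bit test
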
@@ -63,7 +62,7 @@ rule
  | /\=(?=begin\b)/ { result arg_state, TOKENS[text], text }

  ruby22_label? /\"#{SIMPLE_STRING}\":/o process_label
- /\"(#{SIMPLE_STRING})\"/o { result :expr_end, :tSTRING, text[1..-2].gsub(ESC) { unescape $1 } }
+ /\"(#{SIMPLE_STRING})\"/o { result EXPR_END, :tSTRING, text[1..-2].gsub(ESC) { unescape $1 } }
  /\"/ { string STR_DQUOTE; result nil, :tSTRING_BEG, text }

  /\@\@?\d/ { rb_compile_error "`#{text}` is not allowed as a variable name" }
@@ -76,7 +75,7 @@ ruby22_label? /\"#{SIMPLE_STRING}\":/o process_label
  | /\:\:/ process_colon2
  | /\:/ process_colon1

- /->/ { result :expr_endfn, :tLAMBDA, nil }
+ /->/ { result EXPR_ENDFN, :tLAMBDA, nil }

  /[+-]/ process_plus_minus

@@ -97,57 +96,57 @@ ruby22_label? /\"#{SIMPLE_STRING}\":/o process_label
  was_label? /\'#{SSTRING}\':?/o process_label_or_string

  : /\|/
- | /\|\|\=/ { result :expr_beg, :tOP_ASGN, "||" }
- | /\|\|/ { result :expr_beg, :tOROP, "||" }
- | /\|\=/ { result :expr_beg, :tOP_ASGN, "|" }
- | /\|/ { result :arg_state, :tPIPE, "|" }
+ | /\|\|\=/ { result EXPR_BEG, :tOP_ASGN, "||" }
+ | /\|\|/ { result EXPR_BEG, :tOROP, "||" }
+ | /\|\=/ { result EXPR_BEG, :tOP_ASGN, "|" }
+ | /\|/ { state = is_after_operator? ? EXPR_ARG : EXPR_PAR; result state, :tPIPE, "|" }

  /\{/ process_brace_open

  : /\*/
- | /\*\*=/ { result :expr_beg, :tOP_ASGN, "**" }
+ | /\*\*=/ { result EXPR_BEG, :tOP_ASGN, "**" }
  | /\*\*/ { result(:arg_state, space_vs_beginning(:tDSTAR, :tDSTAR, :tPOW), "**") }
- | /\*\=/ { result(:expr_beg, :tOP_ASGN, "*") }
+ | /\*\=/ { result(EXPR_BEG, :tOP_ASGN, "*") }
  | /\*/ { result(:arg_state, space_vs_beginning(:tSTAR, :tSTAR, :tSTAR2), "*") }

  # TODO: fix result+process_lchevron to set command_start = true
  : /</
  | /\<\=\>/ { result :arg_state, :tCMP, "<=>" }
  | /\<\=/ { result :arg_state, :tLEQ, "<=" }
- | /\<\<\=/ { result :expr_beg, :tOP_ASGN, "<<" }
+ | /\<\<\=/ { result EXPR_BEG, :tOP_ASGN, "<<" }
  | /\<\</ process_lchevron
  | /\</ { result :arg_state, :tLT, "<" }

  : />/
  | /\>\=/ { result :arg_state, :tGEQ, ">=" }
- | /\>\>=/ { result :expr_beg, :tOP_ASGN, ">>" }
+ | /\>\>=/ { result EXPR_BEG, :tOP_ASGN, ">>" }
  | /\>\>/ { result :arg_state, :tRSHFT, ">>" }
  | /\>/ { result :arg_state, :tGT, ">" }

  : /\`/
- | expr_fname? /\`/ { result(:expr_end, :tBACK_REF2, "`") }
- | expr_dot? /\`/ { result((cmd_state ? :expr_cmdarg : :expr_arg), :tBACK_REF2, "`") }
+ | expr_fname? /\`/ { result(EXPR_END, :tBACK_REF2, "`") }
+ | expr_dot? /\`/ { result((cmd_state ? EXPR_CMDARG : EXPR_ARG), :tBACK_REF2, "`") }
  | /\`/ { string STR_XQUOTE, '`'; result(nil, :tXSTRING_BEG, "`") }

  /\?/ process_questionmark

  : /&/
- | /\&\&\=/ { result(:expr_beg, :tOP_ASGN, "&&") }
- | /\&\&/ { result(:expr_beg, :tANDOP, "&&") }
- | /\&\=/ { result(:expr_beg, :tOP_ASGN, "&" ) }
- | /\&\./ { result(:expr_dot, :tLONELY, "&.") }
+ | /\&\&\=/ { result(EXPR_BEG, :tOP_ASGN, "&&") }
+ | /\&\&/ { result(EXPR_BEG, :tANDOP, "&&") }
+ | /\&\=/ { result(EXPR_BEG, :tOP_ASGN, "&" ) }
+ | /\&\./ { result(EXPR_DOT, :tLONELY, "&.") }
  | /\&/ process_amper

  /\// process_slash

  : /\^/
- | /\^=/ { result(:expr_beg, :tOP_ASGN, "^") }
+ | /\^=/ { result(EXPR_BEG, :tOP_ASGN, "^") }
  | /\^/ { result(:arg_state, :tCARET, "^") }

- /\;/ { self.command_start = true; result(:expr_beg, :tSEMI, ";") }
+ /\;/ { self.command_start = true; result(EXPR_BEG, :tSEMI, ";") }

  : /~/
- | in_arg_state? /\~@/ { result(:arg_state, :tTILDE, "~") }
+ | is_after_operator? /\~@/ { result(:arg_state, :tTILDE, "~") }
  | /\~/ { result(:arg_state, :tTILDE, "~") }

  : /\\/
@@ -165,6 +164,7 @@ was_label? /\'#{SSTRING}\':?/o process_label_or_string
  | in_fname? /\$([1-9]\d*)/ process_gvar
  | /\$([1-9]\d*)/ process_nthref
  | /\$0/ process_gvar
+ | /\$[^[:ascii:]]+/ process_gvar
  | /\$\W|\$\z/ process_gvar_oddity
  | /\$\w+/ process_gvar
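
The new /\$[^[:ascii:]]+/ alternative lets a global variable whose name is entirely non-ASCII reach process_gvar instead of falling through to process_gvar_oddity. Roughly, input like the following should now parse (sketch, assuming the released 3.13.1 gem; the resulting sexp is abbreviated):

    require "ruby_parser"

    # Previously a lone non-ASCII name after "$" hit process_gvar_oddity;
    # with the new rule it lexes as an ordinary gvar token.
    RubyParser.new.process "$ほげ = 1"   # => roughly s(:gasgn, :$ほげ, s(:lit, 1))
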

@@ -8,11 +8,16 @@
  #
  # lexical scanner definition for ruby

+
+ ##
+ # The generated lexer RubyLexer
+
  class RubyLexer
  require 'strscan'

+ # :stopdoc:
  IDENT = /^#{IDENT_CHAR}+/o
- ESC = /\\((?>[0-7]{1,3}|x[0-9a-fA-F]{1,2}|M-[^\\]|(C-|c)[^\\]|u[0-9a-fA-F]+|u\{[0-9a-fA-F]+\}|[^0-7xMCc]))/
+ ESC = /\\((?>[0-7]{1,3}|x[0-9a-fA-F]{1,2}|M-[^\\]|(C-|c)[^\\]|u[0-9a-fA-F]{1,4}|u\{[0-9a-fA-F]+\}|[^0-7xMCc]))/
  SIMPLE_STRING = /((#{ESC}|\#(#{ESC}|[^\{\#\@\$\"\\])|[^\"\\\#])*)/o
  SSTRING = /((\\.|[^\'])*)/
  INT_DEC = /[+]?(?:(?:[1-9][\d_]*|0)(?!\.\d)(ri|r|i)?\b|0d[0-9_]+)(ri|r|i)?/i
@@ -24,30 +29,56 @@ class RubyLexer
  NUM_BAD = /[+]?0[xbd]\b/i
  INT_OCT_BAD = /[+]?0o?[0-7_]*[89]/i
  FLOAT_BAD = /[+]?\d[\d_]*_(e|\.)/i
-
+ # :startdoc:
+ # :stopdoc:
  class LexerError < StandardError ; end
  class ScanError < LexerError ; end
+ # :startdoc:
+
+ ##
+ # The file name / path

  attr_accessor :filename
+
+ ##
+ # The StringScanner for this lexer.
+
  attr_accessor :ss
+
+ ##
+ # The current lexical state.
+
  attr_accessor :state

  alias :match :ss

+ ##
+ # The match groups for the current scan.
+
  def matches
  m = (1..9).map { |i| ss[i] }
  m.pop until m[-1] or m.empty?
  m
  end

+ ##
+ # Yields on the current action.
+
  def action
  yield
  end

+
+ ##
+ # The current scanner class. Must be overridden in subclasses.
+
  def scanner_class
  StringScanner
  end unless instance_methods(false).map(&:to_s).include?("scanner_class")

+ ##
+ # Parse the given string.
+
  def parse str
  self.ss = scanner_class.new str
  self.state ||= nil
@@ -55,6 +86,9 @@ class RubyLexer
  do_parse
  end

+ ##
+ # Read in and parse the file at +path+.
+
  def parse_file path
  self.filename = path
  open path do |f|
@@ -62,12 +96,18 @@ class RubyLexer
  end
  end

+ ##
+ # The current location in the parse.
+
  def location
  [
  (filename || "<input>"),
  ].compact.join(":")
  end

+ ##
+ # Lex the next token.
+
  def next_token
  return process_string if lex_strterm
  self.cmd_state = self.command_start
@@ -90,24 +130,24 @@ class RubyLexer
  process_brace_close text
  when ss.match?(/\!/) then
  case
- when in_arg_state? && (ss.skip(/\!\@/)) then
- action { result :expr_arg, :tUBANG, "!@" }
+ when is_after_operator? && (ss.skip(/\!\@/)) then
+ action { result EXPR_ARG, :tUBANG, "!@" }
  when text = ss.scan(/\![=~]?/) then
  action { result :arg_state, TOKENS[text], text }
  end # group /\!/
  when ss.match?(/\./) then
  case
  when text = ss.scan(/\.\.\.?/) then
- action { result :expr_beg, TOKENS[text], text }
+ action { result EXPR_BEG, TOKENS[text], text }
  when ss.skip(/\.\d/) then
  action { rb_compile_error "no .<digit> floating literal anymore put 0 before dot" }
  when ss.skip(/\./) then
- action { self.lex_state = :expr_beg; result :expr_dot, :tDOT, "." }
+ action { self.lex_state = EXPR_BEG; result EXPR_DOT, :tDOT, "." }
  end # group /\./
  when text = ss.scan(/\(/) then
  process_paren text
  when text = ss.scan(/\,/) then
- action { result :expr_beg, TOKENS[text], text }
+ action { result EXPR_PAR, TOKENS[text], text }
  when ss.match?(/=/) then
  case
  when text = ss.scan(/\=\=\=|\=\=|\=~|\=>|\=(?!begin\b)/) then
@@ -120,7 +160,7 @@ class RubyLexer
  when ruby22_label? && (text = ss.scan(/\"#{SIMPLE_STRING}\":/o)) then
  process_label text
  when text = ss.scan(/\"(#{SIMPLE_STRING})\"/o) then
- action { result :expr_end, :tSTRING, text[1..-2].gsub(ESC) { unescape $1 } }
+ action { result EXPR_END, :tSTRING, text[1..-2].gsub(ESC) { unescape $1 } }
  when text = ss.scan(/\"/) then
  action { string STR_DQUOTE; result nil, :tSTRING_BEG, text }
  when text = ss.scan(/\@\@?\d/) then
@@ -141,7 +181,7 @@ class RubyLexer
  process_colon1 text
  end # group /:/
  when ss.skip(/->/) then
- action { result :expr_endfn, :tLAMBDA, nil }
+ action { result EXPR_ENDFN, :tLAMBDA, nil }
  when text = ss.scan(/[+-]/) then
  process_plus_minus text
  when ss.match?(/[+\d]/) then
@@ -174,24 +214,24 @@ class RubyLexer
  when ss.match?(/\|/) then
  case
  when ss.skip(/\|\|\=/) then
- action { result :expr_beg, :tOP_ASGN, "||" }
+ action { result EXPR_BEG, :tOP_ASGN, "||" }
  when ss.skip(/\|\|/) then
- action { result :expr_beg, :tOROP, "||" }
+ action { result EXPR_BEG, :tOROP, "||" }
  when ss.skip(/\|\=/) then
- action { result :expr_beg, :tOP_ASGN, "|" }
+ action { result EXPR_BEG, :tOP_ASGN, "|" }
  when ss.skip(/\|/) then
- action { result :arg_state, :tPIPE, "|" }
+ action { state = is_after_operator? ? EXPR_ARG : EXPR_PAR; result state, :tPIPE, "|" }
  end # group /\|/
  when text = ss.scan(/\{/) then
  process_brace_open text
  when ss.match?(/\*/) then
  case
  when ss.skip(/\*\*=/) then
- action { result :expr_beg, :tOP_ASGN, "**" }
+ action { result EXPR_BEG, :tOP_ASGN, "**" }
  when ss.skip(/\*\*/) then
  action { result(:arg_state, space_vs_beginning(:tDSTAR, :tDSTAR, :tPOW), "**") }
  when ss.skip(/\*\=/) then
- action { result(:expr_beg, :tOP_ASGN, "*") }
+ action { result(EXPR_BEG, :tOP_ASGN, "*") }
  when ss.skip(/\*/) then
  action { result(:arg_state, space_vs_beginning(:tSTAR, :tSTAR, :tSTAR2), "*") }
  end # group /\*/
@@ -202,7 +242,7 @@ class RubyLexer
  when ss.skip(/\<\=/) then
  action { result :arg_state, :tLEQ, "<=" }
  when ss.skip(/\<\<\=/) then
- action { result :expr_beg, :tOP_ASGN, "<<" }
+ action { result EXPR_BEG, :tOP_ASGN, "<<" }
  when text = ss.scan(/\<\</) then
  process_lchevron text
  when ss.skip(/\</) then
@@ -213,7 +253,7 @@ class RubyLexer
  when ss.skip(/\>\=/) then
  action { result :arg_state, :tGEQ, ">=" }
  when ss.skip(/\>\>=/) then
- action { result :expr_beg, :tOP_ASGN, ">>" }
+ action { result EXPR_BEG, :tOP_ASGN, ">>" }
  when ss.skip(/\>\>/) then
  action { result :arg_state, :tRSHFT, ">>" }
  when ss.skip(/\>/) then
@@ -222,9 +262,9 @@ class RubyLexer
  when ss.match?(/\`/) then
  case
  when expr_fname? && (ss.skip(/\`/)) then
- action { result(:expr_end, :tBACK_REF2, "`") }
+ action { result(EXPR_END, :tBACK_REF2, "`") }
  when expr_dot? && (ss.skip(/\`/)) then
- action { result((cmd_state ? :expr_cmdarg : :expr_arg), :tBACK_REF2, "`") }
+ action { result((cmd_state ? EXPR_CMDARG : EXPR_ARG), :tBACK_REF2, "`") }
  when ss.skip(/\`/) then
  action { string STR_XQUOTE, '`'; result(nil, :tXSTRING_BEG, "`") }
  end # group /\`/
@@ -233,13 +273,13 @@ class RubyLexer
  when ss.match?(/&/) then
  case
  when ss.skip(/\&\&\=/) then
- action { result(:expr_beg, :tOP_ASGN, "&&") }
+ action { result(EXPR_BEG, :tOP_ASGN, "&&") }
  when ss.skip(/\&\&/) then
- action { result(:expr_beg, :tANDOP, "&&") }
+ action { result(EXPR_BEG, :tANDOP, "&&") }
  when ss.skip(/\&\=/) then
- action { result(:expr_beg, :tOP_ASGN, "&" ) }
+ action { result(EXPR_BEG, :tOP_ASGN, "&" ) }
  when ss.skip(/\&\./) then
- action { result(:expr_dot, :tLONELY, "&.") }
+ action { result(EXPR_DOT, :tLONELY, "&.") }
  when text = ss.scan(/\&/) then
  process_amper text
  end # group /&/
@@ -248,15 +288,15 @@ class RubyLexer
  when ss.match?(/\^/) then
  case
  when ss.skip(/\^=/) then
- action { result(:expr_beg, :tOP_ASGN, "^") }
+ action { result(EXPR_BEG, :tOP_ASGN, "^") }
  when ss.skip(/\^/) then
  action { result(:arg_state, :tCARET, "^") }
  end # group /\^/
  when ss.skip(/\;/) then
- action { self.command_start = true; result(:expr_beg, :tSEMI, ";") }
+ action { self.command_start = true; result(EXPR_BEG, :tSEMI, ";") }
  when ss.match?(/~/) then
  case
- when in_arg_state? && (ss.skip(/\~@/)) then
+ when is_after_operator? && (ss.skip(/\~@/)) then
  action { result(:arg_state, :tTILDE, "~") }
  when ss.skip(/\~/) then
  action { result(:arg_state, :tTILDE, "~") }
@@ -288,6 +328,8 @@ class RubyLexer
  process_nthref text
  when text = ss.scan(/\$0/) then
  process_gvar text
+ when text = ss.scan(/\$[^[:ascii:]]+/) then
+ process_gvar text
  when text = ss.scan(/\$\W|\$\z/) then
  process_gvar_oddity text
  when text = ss.scan(/\$\w+/) then
@@ -9,6 +9,8 @@ class RubyParser

  VERSIONS = []

+ attr_accessor :current
+
  class Parser < Racc::Parser
  include RubyParserStuff

@@ -30,7 +32,7 @@ class RubyParser
  def process s, f = "(string)", t = 10
  e = nil
  VERSIONS.each do |klass|
- parser = klass.new
+ self.current = parser = klass.new
  begin
  return parser.process s, f, t
  rescue Racc::ParseError, RubyParser::SyntaxError => exc
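
The fall-through RubyParser now remembers whichever versioned parser it most recently instantiated in the new current accessor, so after a parse you can see which grammar actually handled the input. A small usage sketch (the input string is made up):

    require "ruby_parser"

    rp   = RubyParser.new
    sexp = rp.process "a&.b"   # tries each parser in VERSIONS until one succeeds
    rp.current                  # => the versioned parser instance that was used last
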
@@ -73,7 +73,7 @@ preclow
  rule

  program: {
- self.lexer.lex_state = :expr_beg
+ self.lexer.lex_state = EXPR_BEG
  }
  top_compstmt
  {
@@ -175,7 +175,7 @@ rule

  stmt: kALIAS fitem
  {
- lexer.lex_state = :expr_fname
+ lexer.lex_state = EXPR_FNAME
  result = self.lexer.lineno
  }
  fitem
@@ -302,12 +302,14 @@ rule
  expr, = val
  result = value_expr expr
  }
+ #if V >= 24
  | command_call kRESCUE_MOD stmt
  {
  expr, _, resbody = val
  expr = value_expr expr
  result = new_rescue(expr, new_resbody(s(:array), resbody))
  }
+ #endif
  | command_asgn

  expr: command_call
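
The new #if V >= 24 / #endif guard means the command_call kRESCUE_MOD stmt alternative is only emitted into the generated parsers for Ruby 2.4 and later; where it applies, the action wraps the command call in a rescue node via new_rescue and new_resbody. The kind of statement that branch covers (illustrative only):

    # a parenthesis-less command call followed by a rescue modifier
    raise ArgumentError, "boom" rescue warn "recovered"
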
@@ -643,14 +645,14 @@ rule
  fname: tIDENTIFIER | tCONSTANT | tFID
  | op
  {
- lexer.lex_state = :expr_end
+ lexer.lex_state = EXPR_END
  result = val[0]
  }

  | reswords
  {
  (sym, _line), = val
- lexer.lex_state = :expr_end
+ lexer.lex_state = EXPR_END
  result = sym
  }

@@ -669,7 +671,7 @@ rule
  |
  undef_list tCOMMA
  {
- lexer.lex_state = :expr_fname
+ lexer.lex_state = EXPR_FNAME
  }
  fitem
  {
@@ -722,13 +724,21 @@ rule
  }
  | primary_value tCOLON2 tCONSTANT tOP_ASGN arg_rhs
  {
- # TODO: assignment
- raise "not yet: %p" % [val]
+ lhs1, _, lhs2, op, rhs = val
+
+ lhs = s(:colon2, lhs1, lhs2.to_sym).line lhs1.line
+ result = new_const_op_asgn [lhs, op, rhs]
  }
- | tCOLON3 tCONSTANT tOP_ASGN arg_rhs
+ | tCOLON3 tCONSTANT
  {
- # TODO: assignment
- raise "not yet: %p" % [val]
+ result = self.lexer.lineno
+ }
+ tOP_ASGN arg_rhs
+ {
+ _, lhs, line, op, rhs = val
+
+ lhs = s(:colon3, lhs.to_sym).line line
+ result = new_const_op_asgn [lhs, op, rhs]
  }
  | backref tOP_ASGN arg_rhs
  {
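
Both constant op-assign alternatives used to bail out with raise "not yet"; they now build a real assignment through new_const_op_asgn, with the tCOLON3 variant restructured so the line number is captured before tOP_ASGN arg_rhs is reduced. In practice that means input like the following parses instead of raising (sketch; the exact sexp shapes are omitted):

    require "ruby_parser"

    RubyParser.new.process "Foo::Bar += 1"   # scoped constant, the tCOLON2 branch
    RubyParser.new.process "::Baz ||= 2"     # top-level constant, the tCOLON3 branch
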
@@ -1085,7 +1095,7 @@ rule
  }
  | tLPAREN_ARG rparen
  {
- # TODO: lex_state = :expr_endarg in between
+ # TODO: lex_state = EXPR_ENDARG in between
  debug20 13, val, result
  }
  | tLPAREN_ARG
@@ -1096,7 +1106,7 @@ rule
  }
  stmt
  {
- lexer.lex_state = :expr_endarg
+ lexer.lex_state = EXPR_ENDARG
  }
  rparen
  {
@@ -1296,13 +1306,13 @@ rule
  | k_def singleton dot_or_colon
  {
  self.comments.push self.lexer.comments
- lexer.lex_state = :expr_fname
+ lexer.lex_state = EXPR_FNAME
  }
  fname
  {
  self.in_single += 1
  self.env.extend
- lexer.lex_state = :expr_endfn # force for args
+ lexer.lex_state = EXPR_ENDFN # force for args
  result = [lexer.lineno, self.lexer.cmdarg.stack.dup]
  lexer.cmdarg.stack.replace [false]
  }
@@ -1976,7 +1986,7 @@ regexp_contents: none
  result = lexer.lex_strterm

  lexer.lex_strterm = nil
- lexer.lex_state = :expr_beg
+ lexer.lex_state = EXPR_BEG
  }
  string_dvar
  {
@@ -1997,7 +2007,7 @@ regexp_contents: none
  lexer.brace_nest = 0
  lexer.string_nest = 0

- lexer.lex_state = :expr_beg
+ lexer.lex_state = EXPR_BEG
  }
  compstmt
  tSTRING_DEND
@@ -2038,7 +2048,7 @@ regexp_contents: none

  symbol: tSYMBEG sym
  {
- lexer.lex_state = :expr_end
+ lexer.lex_state = EXPR_END
  result = val[1].to_sym
  }
  | tSYMBOL
@@ -2050,7 +2060,7 @@ regexp_contents: none

  dsym: tSYMBEG xstring_contents tSTRING_END
  {
- lexer.lex_state = :expr_end
+ lexer.lex_state = EXPR_END
  result = val[1]

  result ||= s(:str, "")
@@ -2140,7 +2150,7 @@ keyword_variable: kNIL { result = s(:nil) }

  superclass: tLT
  {
- lexer.lex_state = :expr_beg
+ lexer.lex_state = EXPR_BEG
  lexer.command_start = true
  }
  expr_value term
@@ -2155,13 +2165,13 @@ keyword_variable: kNIL { result = s(:nil) }
  f_arglist: tLPAREN2 f_args rparen
  {
  result = val[1]
- self.lexer.lex_state = :expr_beg
+ self.lexer.lex_state = EXPR_BEG
  self.lexer.command_start = true
  }
  | {
  result = self.in_kwarg
  self.in_kwarg = true
- # TODO: self.lexer.lex_state |= :expr_label
+ self.lexer.lex_state |= EXPR_LABEL
  }
  f_args term
  {
@@ -2169,7 +2179,7 @@ keyword_variable: kNIL { result = s(:nil) }

  self.in_kwarg = kwarg
  result = args
- lexer.lex_state = :expr_beg
+ lexer.lex_state = EXPR_BEG
  lexer.command_start = true
  }

@@ -2482,7 +2492,7 @@ keyword_variable: kNIL { result = s(:nil) }
  singleton: var_ref
  | tLPAREN2
  {
- lexer.lex_state = :expr_beg
+ lexer.lex_state = EXPR_BEG
  }
  expr rparen
  {
@@ -2563,6 +2573,7 @@ end

  require "ruby_lexer"
  require "ruby_parser_extras"
+ include RubyLexer::State::Values

  # :stopdoc:
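
The one functional line in this final hunk, include RubyLexer::State::Values, is what puts the EXPR_* constants used by all of the grammar actions above into top-level scope for the parser files. A minimal sketch of that pattern, with hypothetical names and values standing in for the gem's real State objects:

    class MyLexer
      class State
        module Values
          EXPR_BEG   = 0x01   # hypothetical; the gem wraps these in State instances
          EXPR_FNAME = 0x02
        end
      end
    end

    include MyLexer::State::Values   # the same top-level include as in the diff
    EXPR_FNAME                       # => 2, visible without a namespace prefix
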