rubylexer 0.7.2 → 0.7.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/History.txt +25 -0
- data/Manifest.txt +5 -8
- data/Rakefile +15 -3
- data/lib/rubylexer.rb +161 -72
- data/lib/rubylexer/context.rb +8 -2
- data/lib/rubylexer/rubycode.rb +2 -0
- data/lib/rubylexer/rulexer.rb +13 -8
- data/{test/data → lib/rubylexer/test}/illegal_oneliners.rb +0 -0
- data/{test/data → lib/rubylexer/test}/illegal_stanzas.rb +0 -0
- data/{test/data → lib/rubylexer/test}/oneliners.rb +3 -0
- data/{test/data → lib/rubylexer/test}/stanzas.rb +0 -1
- data/lib/rubylexer/test/testcases.rb +11 -0
- data/lib/rubylexer/token.rb +17 -6
- data/lib/rubylexer/version.rb +1 -1
- data/test/code/locatetest.rb +63 -63
- data/test/code/regression.rb +2 -1
- data/test/code/rubylexervsruby.rb +1 -0
- metadata +10 -14
- data/rubylexer.vpj +0 -108
- data/test/code/testcases.rb +0 -11
- data/test/data/heremonsters.rb.broken.save +0 -68
- data/test/data/tokentest.assert.rb.can +0 -7
data/History.txt
CHANGED
@@ -1,3 +1,28 @@
+=== 0.7.3/4-19-2009
+* 9 Bugfixes:
+* remember whether comma was seen in paren context
+* reducing the warning load
+* remember whether we're in a method def when starting a recursive lexer
+* (so that class vars can be marked as being in a method if in #{})
+* make sure Token#to_s always outputs _something_ halfway sensible
+* make sure funclike keywords come out as a keyword
+* break/next/return method after . was not detected properly
+* need a NoWsToken before :: in names of compound modules
+* abort implicit paren contexts when elsif seen
+* all files should be world-readable now
+
+* 9 Minor Enhancements:
+* move test data into lib/ dir so I can get to it better from redparse
+* split parse_keywords into separate methods for each keyword
+* (I had hoped to speed up keywords, but keyword_def is still slow)
+* more of the lists of keywords are now available as arrays as well
+* (parenthesis) KeywordToken needs to know if it's for grouping or call
+* minor speedups in newline and ident
+* don't panic if rubygems not available
+* make linenum publicly available in RubyCode
+* various other little helper methods needed by redparse in Tokens
+* hack Rakefile so 'rake test' will stay in 1 process (keeps netbeans happy)
+
 === 0.7.2/10-12-2008
 * 12 Minor Enhancements:
 * a new context for then kw expected
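The biggest structural change in 0.7.3 is the one listed above: parse_keywords no longer handles every keyword inline but dispatches to a per-keyword method (keyword_end, keyword_if, ...), falling back to a caller-supplied block for non-keywords; see the data/lib/rubylexer.rb hunks below. The following is a minimal, self-contained sketch of that dispatch pattern; the class and handler names here are invented for illustration and are not rubylexer's own code.

  # Sketch of the respond_to?/send dispatch that 0.7.3's parse_keywords uses.
  # MiniKeywordDispatcher and its handlers are hypothetical example names.
  class MiniKeywordDispatcher
    # one small handler per keyword; kept public so respond_to? can find them
    def keyword_if(word, offset, result); result << [:kw_if, offset]; end
    def keyword_end(word, offset, result); result << [:kw_end, offset]; end
    alias_method :keyword_unless, :keyword_if   # keywords can share a handler

    # call keyword_<word> if such a handler exists, otherwise hand the word
    # to the block (the "not actually a keyword here" path)
    def parse_keyword(word, offset, result, &block)
      m = "keyword_#{word}"
      respond_to?(m) ? send(m, word, offset, result) : block[word]
    end
  end

  d = MiniKeywordDispatcher.new
  d.parse_keyword("if", 0, [])  { |w| [:ident, w] }   #=> [[:kw_if, 0]]
  d.parse_keyword("foo", 0, []) { |w| [:ident, w] }   #=> [:ident, "foo"]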
data/Manifest.txt
CHANGED
@@ -43,7 +43,6 @@ test/data/23.rb
 test/data/lbrack.rb
 test/data/untitled1.rb
 test/data/rescue.rb
-test/data/tokentest.assert.rb.can
 test/data/pleac.rb.broken
 test/data/heart.rb
 test/data/s.rb
@@ -63,7 +62,6 @@ test/code/torment
 test/code/locatetest
 test/code/deletewarns.rb
 lib/rubylexer/0.7.1.rb
-rubylexer.vpj
 test/code/all_the_gems.rb
 test/code/all_the_raas.rb
 test/code/all_the_rubies.rb
@@ -73,7 +71,7 @@ test/code/lexloop
 test/code/regression.rb
 test/code/strgen.rb
 test/code/tarball.rb
-test/
+lib/rubylexer/test/testcases.rb
 test/data/chunky.plain.rb
 test/data/cvtesc.rb
 test/data/__eof2.rb
@@ -91,10 +89,9 @@ test/data/heremonsters_dos.rb
 test/data/heremonsters_dos.rb.broken
 test/data/heremonsters.rb
 test/data/heremonsters.rb.broken
-test/data/heremonsters.rb.broken.save
 test/data/here_squote.rb
-test/
-test/
+lib/rubylexer/test/illegal_oneliners.rb
+lib/rubylexer/test/illegal_stanzas.rb
 test/data/make_ws_strdelim.rb
 test/data/maven2_builer_test.rb
 test/data/migration.rb
@@ -102,10 +99,10 @@ test/data/modl_dos.rb
 test/data/modl_fails.rb
 test/data/modl.rb
 test/data/multilinestring.rb
-test/
+lib/rubylexer/test/oneliners.rb
 test/data/simple_dos.rb
 test/data/simple.rb
-test/
+lib/rubylexer/test/stanzas.rb
 test/data/strdelim_crlf.rb
 test/data/stuff2.rb
 test/data/stuff3.rb
data/Rakefile
CHANGED
@@ -2,9 +2,21 @@
 # Distributed under the terms of Ruby's license.
 require 'rubygems'
 require 'hoe'
+
+if $*==["test"]
+#hack to get 'rake test' to stay in one process
+#which keeps netbeans happy
+# Object.send :remove_const, :RubyLexer
+$:<<"lib"
+require 'rubylexer.rb'
+require "test/unit"
+require "test/code/regression.rb"
+Test::Unit::AutoRunner.run
+exit
+end
+
 require 'lib/rubylexer/version.rb'

-
 readme=open("README.txt")
 readme.readline("\n=== DESCRIPTION:")
 readme.readline("\n\n")
@@ -18,8 +30,8 @@ require 'lib/rubylexer/version.rb'
 _.test_globs=["test/code/regression.rb"]
 _.description=desc
 _.summary=desc[/\A[^.]+\./]
-_.spec_extras={:bindir=>''}
-_.rdoc_pattern=/\A(howtouse\.txt|testing\.txt|README\.txt|lib
+_.spec_extras={:bindir=>'',:rdoc_options=>'-x lib/rubylexer/test'}
+_.rdoc_pattern=/\A(howtouse\.txt|testing\.txt|README\.txt|lib\/[^\/]*\.rb|lib\/rubylexer\/[^\d][^\/]*\.rb)\Z/
 end


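As a side note on the rdoc_pattern change above: the new pattern keeps the top-level docs and the non-test library files while dropping the versioned compatibility file and anything under the relocated test data (which the added :rdoc_options '-x lib/rubylexer/test' also hides). A quick illustrative check, not part of the package:

  # Illustration only: which paths the new rdoc_pattern admits.
  pattern = /\A(howtouse\.txt|testing\.txt|README\.txt|lib\/[^\/]*\.rb|lib\/rubylexer\/[^\d][^\/]*\.rb)\Z/
  %w[
    README.txt
    lib/rubylexer.rb
    lib/rubylexer/token.rb
    lib/rubylexer/0.7.1.rb
    lib/rubylexer/test/stanzas.rb
  ].each do |path|
    puts "#{pattern =~ path ? 'rdoc' : 'skip'}  #{path}"
  end
  # rdoc  README.txt, lib/rubylexer.rb, lib/rubylexer/token.rb
  # skip  lib/rubylexer/0.7.1.rb, lib/rubylexer/test/stanzas.rb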
data/lib/rubylexer.rb
CHANGED
@@ -52,10 +52,14 @@ class RubyLexer
 BEGINWORDLIST=%w(def class module begin for case do)+OPORBEGINWORDLIST
 OPORBEGINWORDS="(#{OPORBEGINWORDLIST.join '|'})"
 BEGINWORDS=/^(#{BEGINWORDLIST.join '|'})$/o
-
-
-
-
+FUNCLIKE_KEYWORDLIST=%w/break next redo return yield retry super BEGIN END/
+FUNCLIKE_KEYWORDS=/^(#{FUNCLIKE_KEYWORDLIST.join '|'})$/
+VARLIKE_KEYWORDLIST=%w/__FILE__ __LINE__ false nil self true/
+VARLIKE_KEYWORDS=/^(#{VARLIKE_KEYWORDLIST.join '|'})$/
+INNERBOUNDINGWORDLIST=%w"else elsif ensure in then rescue when"
+INNERBOUNDINGWORDS="(#{INNERBOUNDINGWORDLIST.join '|'})"
+BINOPWORDLIST=%w"and or"
+BINOPWORDS="(#{BINOPWORDLIST.join '|'})"
 NEVERSTARTPARAMLISTWORDS=/\A(#{OPORBEGINWORDS}|#{INNERBOUNDINGWORDS}|#{BINOPWORDS}|end)([^a-zA-Z0-9_!?=]|\Z)/o
 NEVERSTARTPARAMLISTFIRST=CharSet['aoeitrwu'] #chars that begin NEVERSTARTPARAMLIST
 NEVERSTARTPARAMLISTMAXLEN=7 #max len of a NEVERSTARTPARAMLIST
@@ -155,6 +159,7 @@ class RubyLexer
 @localvars_stack.last
 end

+attr_accessor :in_def
 attr :localvars_stack
 attr :offset_adjust
 attr_writer :pending_here_bodies
@@ -258,6 +263,7 @@ class RubyLexer
 private
 #-----------------------------------
 def inside_method_def?
+return true if (defined? @in_def) and @in_def
 @parsestack.reverse_each{|ctx|
 ctx.starter=='def' and ctx.state!=:saw_def and return true
 }
@@ -376,12 +382,14 @@ private
 @moretokens.unshift(*parse_keywords(str,oldpos) do |tok|
 #if not a keyword,
 case str
-when FUNCLIKE_KEYWORDS;
+when FUNCLIKE_KEYWORDS; except=tok
 when VARLIKE_KEYWORDS,RUBYKEYWORDS; raise "shouldnt see keywords here, now"
 end
 was_last=@last_operative_token
 @last_operative_token=tok if tok
-safe_recurse { |a| var_or_meth_name(str,was_last,oldpos,after_nonid_op?{true}) }
+normally=safe_recurse { |a| var_or_meth_name(str,was_last,oldpos,after_nonid_op?{true}) }
+(Array===normally ? normally[0]=except : normally=except) if except
+normally
 end)
 return @moretokens.shift
 end
@@ -390,6 +398,7 @@ private
 IDENTREX={}
 def identifier_as_string(context)
 #must begin w/ letter or underscore
+#char class needs changing here for utf8 support
 /[_a-z]/i===nextchar.chr or return

 #equals, question mark, and exclamation mark
@@ -409,7 +418,7 @@ private
 end
 @in_def_name||context==?: and trailers<<"|=(?![=~>])"

-@file.scan(IDENTREX[trailers]||=/^[_a-z][a-z0-9_]*(?:#{trailers})
+@file.scan(IDENTREX[trailers]||=/^(?>[_a-z][a-z0-9_]*(?:#{trailers})?)/i)
 end

 #-----------------------------------
@@ -450,7 +459,7 @@ private
 @defining_lvar or case ctx=@parsestack.last
 #when ForSMContext; ctx.state==:for
 when RescueSMContext
-lasttok.ident=="=>" and @file.match? /\A[\s\v]*([:;#\n]|then[^a-zA-Z0-9_])/m
+lasttok.ident=="=>" and @file.match?( /\A[\s\v]*([:;#\n]|then[^a-zA-Z0-9_])/m )
 #when BlockParamListLhsContext; true
 end
 end
@@ -589,20 +598,20 @@ private
 # )

 #look ahead for closing paren (after some whitespace...)
-want_parens=false if @file.match? /\A.(?:\s|\v|\#.*\n)*\)/
+want_parens=false if @file.match?( /\A.(?:\s|\v|\#.*\n)*\)/ )
 # afterparen=@file.pos
 # getchar
 # ignored_tokens(true)
 # want_parens=false if nextchar==?)
 # @file.pos=afterparen
 want_parens=true if /^(return|break|next)$/===@last_operative_token.ident and not(
-KeywordToken===lasttok and /^(
+KeywordToken===lasttok and /^(\.|::)$/===lasttok.ident
 )
 want_parens ? 1 : 0
 when ?},?],?),?;,(?^ unless @enable_macro), ?|, ?>, ?,, ?., ?=; 2
 when ?+, ?-, ?%, ?/, (?^ if @enable_macro)
 if /^(return|break|next)$/===@last_operative_token.ident and not(
-KeywordToken===lasttok and /^(
+KeywordToken===lasttok and /^(\.|::)$/===lasttok.ident
 )
 1
 else
@@ -611,7 +620,7 @@ private
 when ?*, ?&
 # lasttok=@last_operative_token
 if /^(return|break|next)$/===@last_operative_token.ident and not(
-KeywordToken===lasttok and /^(
+KeywordToken===lasttok and /^(\.|::)$/===lasttok.ident
 )
 1
 else
@@ -665,7 +674,7 @@ private
 last=result.last
 last.set_callsite! false if last.respond_to? :callsite? and last.callsite? #KeywordToken===last and last.ident==')'
 if /^(break|next|return)$/===name and
-!(KeywordToken===lasttok and /^(
+!(KeywordToken===lasttok and /^(\.|::)$/===lasttok.ident)
 ty=KWParamListContextNoParen
 else
 ty=ParamListContextNoParen
@@ -802,6 +811,9 @@ private
 #-----------------------------------
 def enable_macros!
 @enable_macro="macro"
+class <<self
+alias keyword_macro keyword_def
+end
 end
 public :enable_macros!

@@ -836,13 +848,16 @@ private
 #parse keywords now, to prevent confusion over bare symbols
 #and match end with corresponding preceding def or class or whatever.
 #if arg is not a keyword, the block is called
-def parse_keywords(str,offset)
+def parse_keywords(str,offset,&block)
 assert @moretokens.empty?
-assert !(KeywordToken===@last_operative_token and /A(
+assert !(KeywordToken===@last_operative_token and /A(\.|::|def)\Z/===@last_operative_token.ident)
 result=[KeywordToken.new(str,offset)]

-
-
+m="keyword_#{str}"
+respond_to?(m) ? (send m,str,offset,result,&block) : block[MethNameToken.new(str)]
+end
+public #these have to be public so respond_to? can see them (sigh)
+def keyword_end(str,offset,result)
 result.unshift(*abort_noparens!(str))
 @parsestack.last.see self,:semi #sorta hacky... should make an :end event instead?

@@ -859,8 +874,10 @@ private
 BEGINWORDS===start or lexerror result.last, "end does not match #{start or "nil"}"
 /^(do)$/===start and localvars.end_block
 /^(class|module|def)$/===start and @localvars_stack.pop
+return result
+end

-
+def keyword_module(str,offset,result)
 result.first.has_end!
 @parsestack.push WantsEndContext.new(str,@linenum)
 @localvars_stack.push SymbolTable.new
@@ -885,17 +902,22 @@ private
 end
 @moretokens.push VarNameToken.new(name,offset+incr)
 break unless dc
+@moretokens.push NoWsToken.new(offset+md.end(0)-2)
 @moretokens.push KeywordToken.new('::',offset+md.end(0)-2)
 end
 @moretokens.push EndHeaderToken.new(input_position)
-
+return result
+end


-
+def keyword_class(str,offset,result)
 result.first.has_end!
 @parsestack.push ClassContext.new(str,@linenum)
+return result
+end
+

-
+def keyword_if(str,offset,result) #could be infix form without end
 if after_nonid_op?{false} #prefix form
 result.first.has_end!
 @parsestack.push WantsEndContext.new(str,@linenum)
@@ -903,12 +925,23 @@ private
 else #infix form
 result.unshift(*abort_noparens!(str))
 end
-
+return result
+end
+alias keyword_unless keyword_if
+
+def keyword_elsif(str,offset,result)
+result.unshift(*abort_noparens!(str))
 @parsestack.push ExpectThenOrNlContext.new(str,@linenum)
-
+return result
+end
+def keyword_begin(str,offset,result)
 result.first.has_end!
 @parsestack.push WantsEndContext.new(str,@linenum)
-
+return result
+end
+
+alias keyword_case keyword_begin
+def keyword_while(str,offset,result) #could be infix form without end
 if after_nonid_op?{false} #prefix form
 result.first.has_end!
 @parsestack.push WantsEndContext.new(str,@linenum)
@@ -917,14 +950,21 @@ private
 else #infix form
 result.unshift(*abort_noparens!(str))
 end
-
+return result
+end
+
+alias keyword_until keyword_while
+
+def keyword_for(str,offset,result)
 result.first.has_end!
 result.push KwParamListStartToken.new(offset+str.length)
 # corresponding EndToken emitted leaving ForContext ("in" branch, below)
 @parsestack.push WantsEndContext.new(str,@linenum)
 #expect_do_or_end_or_nl! str #handled by ForSMContext now
 @parsestack.push ForSMContext.new(@linenum)
-
+return result
+end
+def keyword_do(str,offset,result)
 result.unshift(*abort_noparens_for_do!(str))
 if ExpectDoOrNlContext===@parsestack.last
 @parsestack.pop
@@ -936,12 +976,17 @@ private
 localvars.start_block
 block_param_list_lookahead
 end
-
+return result
+end
+def keyword_def(str,offset,result) #macros too, if enabled
 result.first.has_end!
 @parsestack.push ctx=DefContext.new(@linenum)
 ctx.state=:saw_def
-
-
+old_moretokens=@moretokens
+@moretokens=[]
+aa=@moretokens
+#safe_recurse { |aa|
+set_last_token KeywordToken.new(str) #hack
 result.concat ignored_tokens

 #read an expr like a.b.c or a::b::c
@@ -960,7 +1005,7 @@ private
 end until parencount==0 #@parsestack.size==old_size
 @localvars_stack.push SymbolTable.new
 else #no parentheses, all tail
-
+set_last_token KeywordToken.new(".") #hack hack
 tokindex=result.size
 result << tok=symbol(false,false)
 name=tok.to_s
@@ -1006,11 +1051,11 @@ private
 #a could even be a keyword (eg self or block_given?).
 end
 #read tail: .b.c.d etc
-result.reverse_each{|res| break set_last_token res unless StillIgnoreToken===res}
+result.reverse_each{|res| break set_last_token( res ) unless StillIgnoreToken===res}
 assert !(IgnoreToken===@last_operative_token)
 state=:expect_op
 @in_def_name=true
-
+while true

 #look for start of parameter list
 nc=(@moretokens.empty? ? nextchar.chr : @moretokens.first.to_s[0,1])
@@ -1041,7 +1086,7 @@ private
 when /^(\.|::)$/.token_pat
 lexerror tok,'expected ident' unless state==:expect_op
 if endofs
-result.insert -2, ImplicitParamListEndToken.new(endofs)
+result.insert( -2, ImplicitParamListEndToken.new(endofs) )
 endofs=nil
 end
 state=:expect_name
@@ -1049,9 +1094,9 @@ private
 ctx.state=:def_body
 state==:expect_op or lexerror tok,'expected identifier'
 if endofs
-result.insert -2,ImplicitParamListEndToken.new(tok.offset)
+result.insert( -2,ImplicitParamListEndToken.new(tok.offset) )
 end
-result.insert -2, EndHeaderToken.new(tok.offset)
+result.insert( -2, EndHeaderToken.new(tok.offset) )
 break
 else
 lexerror(tok, "bizarre token in def name: " +
@@ -1059,10 +1104,13 @@ private
 end
 end
 @in_def_name=false
-}
-
+#}
+@moretokens= old_moretokens.concat @moretokens
+return result
+end
+def keyword_alias(str,offset,result)
 safe_recurse { |a|
-set_last_token KeywordToken.new "alias" #hack
+set_last_token KeywordToken.new( "alias" )#hack
 result.concat ignored_tokens
 res=symbol(eat_next_if(?:),false)
 unless res
@@ -1070,7 +1118,7 @@ private
 else
 res.ident[0]==?$ and res=VarNameToken.new(res.ident,res.offset)
 result<< res
-set_last_token KeywordToken.new "alias" #hack
+set_last_token KeywordToken.new( "alias" )#hack
 result.concat ignored_tokens
 res=symbol(eat_next_if(?:),false)
 unless res
@@ -1081,10 +1129,12 @@ private
 end
 end
 }
-
+return result
+end
+def keyword_undef(str,offset,result)
 safe_recurse { |a|
 loop do
-set_last_token KeywordToken.new "," #hack
+set_last_token KeywordToken.new( "," )#hack
 result.concat ignored_tokens
 tok=symbol(eat_next_if(?:),false)
 tok or lexerror(result.first,"bad symbol in undef")
@@ -1101,18 +1151,22 @@ private
 end
 }

+return result
+end
 # when "defined?"
 #defined? might have a baresymbol following it
 #does it need to be handled specially?
 #it would seem not.....

-
+def keyword_when(str,offset,result)
 #abort_noparens! emits EndToken on leaving context
 result.unshift(*abort_noparens!(str))
 result.push KwParamListStartToken.new( offset+str.length)
 @parsestack.push WhenParamListContext.new(str,@linenum)
+return result
+end

-
+def keyword_rescue(str,offset,result)
 unless after_nonid_op? {false}
 #rescue needs to be treated differently when in operator context...
 #i think no RescueSMContext should be pushed on the stack...
@@ -1124,8 +1178,10 @@ private
 @parsestack.push RescueSMContext.new(@linenum)
 result.unshift(*abort_noparens!(str))
 end
+return result
+end

-
+def keyword_then(str,offset,result)
 result.unshift(*abort_noparens!(str))
 @parsestack.last.see self,:then

@@ -1133,24 +1189,38 @@ private
 @parsestack.pop
 else #error... does anyone care?
 end
+return result
+end

-
+def keyword_in(str,offset,result)
 result.unshift KwParamListEndToken.new( offset)
 result.unshift(*abort_noparens!(str))
 @parsestack.last.see self,:in
+return result
+end

-
+def _keyword_innerbounding(str,offset,result)
 result.unshift(*abort_noparens!(str))
+return result
+end
+for kw in BINOPWORDLIST+INNERBOUNDINGWORDLIST-["in","then","rescue","when","elsif"]
+alias_method "keyword_#{kw}".to_sym, :_keyword_innerbounding
+end

-
+def keyword_return(str,offset,result)
 fail if KeywordToken===@last_operative_token and @last_operative_token===/\A(\.|::)\Z/
 tok=KeywordToken.new(str,offset)
 result=yield tok
 result[0]=tok
 tok.has_no_block!
+return result
+end
+
+alias keyword_break keyword_return
+alias keyword_next keyword_return


-
+def keyword_END(str,offset,result)
 #END could be treated, lexically, just as if it is an
 #ordinary method, except that local vars created in
 #END blocks are visible to subsequent code. (Why??)
@@ -1161,7 +1231,7 @@ private
 safe_recurse{
 old=result.first
 result=[
-
+KeywordToken.new(old.ident,old.offset),
 ImplicitParamListStartToken.new(input_position),
 ImplicitParamListEndToken.new(input_position),
 *ignored_tokens
@@ -1173,20 +1243,31 @@ private
 @parsestack.push BeginEndContext.new(str,offset)
 }
 end
+return result
+end

-when FUNCLIKE_KEYWORDS
-result=yield MethNameToken.new(str) #should be a keyword token?
-
-when RUBYKEYWORDS
-#do nothing
-
-else result=yield MethNameToken.new(str)
-
-end

-
+def _keyword_funclike(str,offset,result)
+if @last_operative_token===/^(\.|::)$/
+result=yield MethNameToken.new(str) #should pass a methname token here
+else
+result=yield KeywordToken.new(str)
+end
+return result
+end
+for kw in FUNCLIKE_KEYWORDLIST-["END","return","break","next"] do
+alias_method "keyword_#{kw}".to_sym, :_keyword_funclike
+end
+
+def _keyword_varlike(str,offset,result)
+#do nothing
+return result
+end
+for kw in VARLIKE_KEYWORDLIST+["defined?", "not"] do
+alias_method "keyword_#{kw}".to_sym, :_keyword_varlike
 end

+private

 #-----------------------------------
 def parsestack_lastnonassign_is?(obj)
@@ -1221,7 +1302,7 @@ private
 #-----------------------------------
 def block_param_list_lookahead
 safe_recurse{ |la|
-set_last_token KeywordToken.new ';'
+set_last_token KeywordToken.new( ';' )
 a=ignored_tokens

 if eat_next_if(?|)
@@ -1267,7 +1348,7 @@ end
 end
 end

-set_last_token KeywordToken.new ';'
+set_last_token KeywordToken.new( ';' )
 #a.concat ignored_tokens

 #assert @last_operative_token===';'
@@ -1313,7 +1394,7 @@ end
 alias === call
 end

-set_last_token KeywordToken.new ',' #hack
+set_last_token KeywordToken.new( ',' )#hack
 #read local parameter names
 nextvar=nil
 loop do
@@ -1348,7 +1429,7 @@ end
 when /^[&*]$/.token_pat #unary form...
 #a NoWsToken is also expected... read it now
 result.concat maybe_no_ws_token #not needed?
-set_last_token KeywordToken.new ','
+set_last_token KeywordToken.new( ',' )
 else
 lexerror tok,"unfamiliar var name '#{tok}'"
 end
@@ -1382,7 +1463,7 @@ end
 # !(NewlineToken===@last_operative_token) and
 # !(/^(end|;)$/===@last_operative_token)
 #result<<EndHeaderToken.new(result.last.offset+result.last.to_s.size)
-set_last_token KeywordToken.new ';'
+set_last_token KeywordToken.new( ';' )
 result<< get1token
 # end
 }
@@ -1767,7 +1848,7 @@ end
 end

 @offset_adjust=@min_offset_adjust
-@moretokens.push *optional_here_bodies
+@moretokens.push( *optional_here_bodies )
 ln=@linenum
 @moretokens.push lexerror(EscNlToken.new(@filename,ln-1,result,input_position-result.size), error),
 FileAndLineToken.new(@filename,ln,input_position)
@@ -1854,7 +1935,7 @@ end
 back1char #-1 to make newline char the next to read
 @linenum-=1

-assert /[\r\n]/===nextchar.chr
+assert( /[\r\n]/===nextchar.chr )

 #retr evrything til next nl
 if FASTER_STRING_ESCAPES
@@ -2253,9 +2334,9 @@ end
 @parsestack.last.see self,:arrow
 when '': #plain assignment: record local variable definitions
 last_context_not_implicit.lhs=false
-@moretokens.push *ignored_tokens(true).map{|x|
+@moretokens.push( *ignored_tokens(true).map{|x|
 NewlineToken===x ? EscNlToken.new(@filename,@linenum,x.ident,x.offset) : x
-}
+} )
 @parsestack.push AssignmentRhsContext.new(@linenum)
 if eat_next_if ?*
 tok=OperatorToken.new('*', input_position-1)
@@ -2401,7 +2482,7 @@ end
 # 'need to find matching callsite context and end it if implicit'
 lasttok=last_operative_token
 if !(lasttok===')' and lasttok.callsite?) #or ParamListContextNoParen===parsestack.last
-@moretokens.push *(abort_1_noparen!(1).push tokch)
+@moretokens.push( *(abort_1_noparen!(1).push tokch) )
 tokch=@moretokens.shift
 end
 #=end
@@ -2475,6 +2556,14 @@ end
 #-----------------------------------
 def comma(ch)
 @moretokens.push token=single_char_token(ch)
+
+#if assignment rhs seen inside method param list, when param list, array or hash literal,
+# rescue where comma is expected, or method def param list
+# then end the assignment rhs now
+#+[OBS,ParamListContext|ParamListContextNoParen|WhenParamListContext|ListImmedContext|
+# (RescueSMContext&-{:state=>:rescue})|(DefContext&-{:in_body=>FalseClass|nil}),
+# AssignmentRhsContext
+#]===@parsestack
 if AssignmentRhsContext===@parsestack[-1] and
 ParamListContext===@parsestack[-2] ||
 ParamListContextNoParen===@parsestack[-2] ||
@@ -2487,9 +2576,9 @@ end
 end
 token.comma_type=
 case @parsestack[-1]
-when AssignmentRhsContext
-when ParamListContext,ParamListContextNoParen
-when ListImmedContext
+when AssignmentRhsContext; :rhs
+when ParamListContext,ParamListContextNoParen; :call
+when ListImmedContext; :array
 else
 :lhs if comma_in_lvalue_list?
 end
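The comma_type flag set in the hunk above (:rhs, :call, :array, or :lhs/nil from the else branch) appears to be one of the small helpers added for redparse. A hypothetical consumer might branch on it as in this sketch; CommaToken here is a stand-in Struct, not rubylexer's own token class.

  # Illustration only: reacting to the comma_type values assigned above.
  CommaToken = Struct.new(:offset, :comma_type)

  def describe_comma(tok)
    case tok.comma_type
    when :rhs   then "separates values on an assignment right-hand side"
    when :call  then "separates arguments of a method call"
    when :array then "separates elements of a list literal"
    when :lhs   then "separates targets of a multiple assignment"
    else             "comma in an unclassified context"
    end
  end

  puts describe_comma(CommaToken.new(12, :call))  #=> separates arguments of a method call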