js2 0.0.10 → 0.1.1
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the registry.
- data/Manifest +42 -0
- data/README.md +65 -0
- data/Rakefile +19 -35
- data/bin/js2 +80 -66
- data/config/js2.yml +2 -0
- data/js2.gemspec +33 -0
- data/lib/js2/{haml_parser.rb → parser/haml.rb} +2 -2
- data/lib/js2/{haml_engine.rb → parser/haml_engine.rb} +1 -1
- data/lib/js2/parser/lexer.rb +37 -0
- data/lib/js2/{parser.rb → parser/tokenizer.rb} +157 -143
- data/lib/js2/{replace.rb → ragel/helper.rb} +16 -5
- data/lib/js2/ragel/tokenizer.rl +561 -0
- data/lib/js2/{tokenizer.rl.erb → ragel/tokenizer.rl.erb} +12 -19
- data/lib/js2/standard/factory.rb +289 -0
- data/lib/js2/standard/node.rb +75 -0
- data/lib/js2/util/compilation.rb +77 -0
- data/lib/js2/util/config.rb +84 -0
- data/lib/js2/util/exec.rb +34 -0
- data/lib/js2/util/file_handler.rb +73 -0
- data/lib/js2/{js2bootstrap.js2 → util/js2bootstrap.js2} +12 -68
- data/lib/js2/util/processor.rb +88 -0
- data/lib/js2/util/rdoc.rb +35 -0
- data/lib/js2/{sel_decorator.rb → util/sel_decorator.rb} +11 -1
- data/lib/js2.rb +22 -45
- data/test/compiled/bar.js +3 -0
- data/test/compiled/basic.comp.js +31 -0
- data/test/compiled/basic.js +27 -0
- data/test/compiled/foo.js +3 -0
- data/test/fixtures/bar.js2 +3 -0
- data/test/fixtures/basic.js2 +27 -0
- data/test/fixtures/basic.js2.haml +4 -0
- data/test/fixtures/basic.js2.yml +5 -0
- data/test/fixtures/curry.js2 +5 -0
- data/test/fixtures/foo.js2 +3 -0
- data/test/fixtures/member.js2 +14 -0
- data/test/fixtures/private.js2 +5 -0
- data/test/fixtures/property.js2 +4 -0
- data/test/test_helper.rb +25 -0
- data/test/test_js2.rb +43 -0
- data/wiki/features.md +106 -0
- data/wiki/installation.md +53 -0
- metadata +89 -83
- data/Changelog +0 -33
- data/History.txt +0 -4
- data/Manifest.txt +0 -35
- data/PostInstall.txt +0 -7
- data/README +0 -69
- data/README.rdoc +0 -69
- data/README.txt +0 -69
- data/examples/js2.yml +0 -8
- data/examples/test.yml +0 -5
- data/lib/javascript/sel_marker.js2 +0 -150
- data/lib/javascript/test.js2 +0 -73
- data/lib/js2/config.rb +0 -39
- data/lib/js2/daemon.rb +0 -35
- data/lib/js2/file_handler.rb +0 -91
- data/lib/js2/foo.js2.haml +0 -3
- data/lib/js2/js2.js +0 -110
- data/lib/js2/processor.rb +0 -112
- data/lib/js2/test/selenium.rb +0 -119
- data/lib/js2/test/selenium_element.rb +0 -234
- data/lib/js2/test/selenium_helper.rb +0 -27
- data/lib/js2/tree.rb +0 -351
- data/lib/js2/universe.rb +0 -123
- data/lib/tasks/js2.rake +0 -9
- data/website/index.txt +0 -86
- data/{LICENSE → lib/js2/standard/class_node.rb} +0 -0
data/lib/js2/{parser.rb → parser/tokenizer.rb}

@@ -1,21 +1,23 @@
+
 #line 1 "tokenizer.rl"
 # Somewhat based on http://www.mozilla.org/js/language/js20-2000-07/formal/lexer-grammar.html
 # Regular Expression Literals determined with these rules:
 # http://www.mozilla.org/js/language/js20-1999-03-25/tokens.html
 
-
+
+#line 423 "tokenizer.rl"
 
 
 require 'rubygems'
 require 'inline'
 
-class JS2::Parser
+class JS2::Parser::Tokenizer
   attr_accessor :data
 
   inline do |builder|
     builder.c_raw <<-END
 
-      int
+      int tokenize (int argc, VALUE *argv, VALUE self) {
        // convert ruby string to char*
        VALUE r_str = argv[0];
        int data_length = RSTRING(r_str)->len;
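The class embeds its Ragel-generated scanner as C through the RubyInline gem: `builder.c_raw` hands a complete C function to inline's compile-and-load step, and the function must use the `(int argc, VALUE *argv, VALUE self)` calling convention that the new `tokenize` entry point above adopts. Below is a minimal standalone sketch of the same mechanism — a toy method written against the inline gem's documented `c_raw` form, not code from this package:

    require 'rubygems'
    require 'inline'

    class StrLen
      inline do |builder|
        # c_raw registers the function verbatim; the (argc, argv, self)
        # signature mirrors tokenize() in the diff above.
        builder.c_raw <<-'END'
          static VALUE byte_length(int argc, VALUE *argv, VALUE self) {
            VALUE str = argv[0];
            return INT2FIX(RSTRING_LEN(str));
          }
        END
      end
    end

    StrLen.new.byte_length("js2")  # => 3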
@@ -108,7 +110,7 @@ class JS2::Parser
 
 
 
-#line
+#line 114 "tokenizer.c"
 static const unsigned char _dude_actions[] = {
     0, 1, 1, 1, 2, 1, 3, 1,
     4, 1, 5, 1, 6, 1, 7, 1,
@@ -2467,18 +2469,20 @@ static const int dude_error = -1;
 
 static const int dude_en_main = 452;
 
-
+
+#line 527 "tokenizer.rl"
 
-#line
+#line 2476 "tokenizer.c"
    {
    cs = dude_start;
    ts = 0;
    te = 0;
    act = 0;
    }
-
+
+#line 528 "tokenizer.rl"
 
-#line
+#line 2486 "tokenizer.c"
    {
    int _klen;
    unsigned int _trans;
@@ -2497,7 +2501,7 @@ _resume:
 #line 1 "tokenizer.rl"
    {ts = p;}
    break;
-#line
+#line 2505 "tokenizer.c"
    }
    }
 
@@ -2608,23 +2612,23 @@ _eof_trans:
    {act = 13;}
    break;
    case 12:
-#line
+#line 277 "tokenizer.rl"
    {act = 15;}
    break;
    case 13:
-#line
+#line 278 "tokenizer.rl"
    {act = 16;}
    break;
    case 14:
-#line
+#line 281 "tokenizer.rl"
    {act = 18;}
    break;
    case 15:
-#line
+#line 282 "tokenizer.rl"
    {act = 19;}
    break;
    case 16:
-#line
+#line 379 "tokenizer.rl"
    {act = 113;}
    break;
    case 17:
@@ -2661,15 +2665,15 @@ _eof_trans:
    }}
    break;
    case 19:
-#line
+#line 276 "tokenizer.rl"
    {te = p+1;}
    break;
    case 20:
-#line
+#line 280 "tokenizer.rl"
    {te = p+1;{ }}
    break;
    case 21:
-#line
+#line 379 "tokenizer.rl"
    {te = p+1;{
      char single = data[ts-data];
 
@@ -2832,402 +2836,411 @@ _eof_trans:
    break;
    case 30:
 #line 265 "tokenizer.rl"
-   {te = p;p--;{
+   {te = p;p--;{
+     start_argv[0] = sym_COMMENT;
+     start_argv[1] = INT2FIX(ts-data);
+     start_argv[2] = INT2FIX(is_static);
+     rb_funcall2(self, start_sym, 3, start_argv);
+     is_static = 0;
+     stop_argv[0] = INT2FIX(te-data);
+     rb_funcall2(self, stop_sym, 1, stop_argv);
+
+   }}
    break;
    case 31:
-#line
+#line 277 "tokenizer.rl"
    {te = p;p--;}
    break;
    case 32:
-#line
+#line 278 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 33:
-#line
+#line 280 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 34:
-#line
+#line 281 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 35:
-#line
+#line 282 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 36:
-#line
+#line 284 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 37:
-#line
+#line 285 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 38:
-#line
+#line 286 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 39:
-#line
+#line 287 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 40:
-#line
+#line 288 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 41:
-#line
+#line 289 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 42:
-#line
+#line 290 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 43:
-#line
+#line 291 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 44:
-#line
+#line 292 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 45:
-#line
+#line 293 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 46:
-#line
+#line 294 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 47:
-#line
+#line 295 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 48:
-#line
+#line 296 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 49:
-#line
+#line 297 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 50:
-#line
+#line 298 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 51:
-#line
+#line 299 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 52:
-#line
+#line 300 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 53:
-#line
+#line 301 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 54:
-#line
+#line 302 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 55:
-#line
+#line 303 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 56:
-#line
+#line 304 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 57:
-#line
+#line 305 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 58:
-#line
+#line 306 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 59:
-#line
+#line 307 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 60:
-#line
+#line 308 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 61:
-#line
+#line 309 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 62:
-#line
+#line 310 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 63:
-#line
+#line 311 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 64:
-#line
+#line 312 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 65:
-#line
+#line 313 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 66:
-#line
+#line 314 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 67:
-#line
+#line 315 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 68:
-#line
+#line 316 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 69:
-#line
+#line 317 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 70:
-#line
+#line 318 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 71:
-#line
+#line 319 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 72:
-#line
+#line 320 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 73:
-#line
+#line 321 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 74:
-#line
+#line 322 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 75:
-#line
+#line 323 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 76:
-#line
+#line 324 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 77:
-#line
+#line 325 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 78:
-#line
+#line 326 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 79:
-#line
+#line 327 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 80:
-#line
+#line 328 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 81:
-#line
+#line 329 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 82:
-#line
+#line 330 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 83:
-#line
+#line 331 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 84:
-#line
+#line 332 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 85:
-#line
+#line 333 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 86:
-#line
+#line 334 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 87:
-#line
+#line 335 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 88:
-#line
+#line 336 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 89:
-#line
+#line 337 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 90:
-#line
+#line 338 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 91:
-#line
+#line 339 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 92:
-#line
+#line 340 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 93:
-#line
+#line 341 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 94:
-#line
+#line 342 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 95:
-#line
+#line 343 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 96:
-#line
+#line 344 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 97:
-#line
+#line 345 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 98:
-#line
+#line 346 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 99:
-#line
+#line 347 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 100:
-#line
+#line 348 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 101:
-#line
+#line 349 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 102:
-#line
+#line 350 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 103:
-#line
+#line 351 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 104:
-#line
+#line 352 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 105:
-#line
+#line 353 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 106:
-#line
+#line 354 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 107:
-#line
+#line 355 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 108:
-#line
+#line 356 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 109:
-#line
+#line 357 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 110:
-#line
+#line 358 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 111:
-#line
+#line 359 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 112:
-#line
+#line 360 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 113:
-#line
+#line 361 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 114:
-#line
+#line 362 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 115:
-#line
+#line 363 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 116:
-#line
+#line 364 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 117:
-#line
+#line 365 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 118:
-#line
+#line 366 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 119:
-#line
+#line 367 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 120:
-#line
+#line 368 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 121:
-#line
+#line 369 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 122:
-#line
+#line 370 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 123:
-#line
+#line 371 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 124:
-#line
+#line 372 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 125:
-#line
+#line 373 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 126:
-#line
+#line 374 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 127:
-#line
+#line 375 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 128:
-#line
+#line 376 "tokenizer.rl"
    {te = p;p--;{ }}
    break;
    case 129:
-#line
+#line 379 "tokenizer.rl"
    {te = p;p--;{
      char single = data[ts-data];
 
@@ -3314,27 +3327,27 @@ _eof_trans:
    }}
    break;
    case 133:
-#line
+#line 277 "tokenizer.rl"
    {{p = ((te))-1;}}
    break;
    case 134:
-#line
+#line 278 "tokenizer.rl"
    {{p = ((te))-1;}{ }}
    break;
    case 135:
-#line
+#line 280 "tokenizer.rl"
    {{p = ((te))-1;}{ }}
    break;
    case 136:
-#line
+#line 281 "tokenizer.rl"
    {{p = ((te))-1;}{ }}
    break;
    case 137:
-#line
+#line 282 "tokenizer.rl"
    {{p = ((te))-1;}{ }}
    break;
    case 138:
-#line
+#line 379 "tokenizer.rl"
    {{p = ((te))-1;}{
      char single = data[ts-data];
 
@@ -3402,7 +3415,16 @@ _eof_trans:
    {{p = ((te))-1;}}
    break;
    case 13:
-   {{p = ((te))-1;}
+   {{p = ((te))-1;}
+     start_argv[0] = sym_COMMENT;
+     start_argv[1] = INT2FIX(ts-data);
+     start_argv[2] = INT2FIX(is_static);
+     rb_funcall2(self, start_sym, 3, start_argv);
+     is_static = 0;
+     stop_argv[0] = INT2FIX(te-data);
+     rb_funcall2(self, stop_sym, 1, stop_argv);
+
+   }
    break;
    case 15:
    {{p = ((te))-1;}}
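The expanded action bodies in this hunk and in the earlier `case 30` hunk follow one pattern: when the scanner recognizes a comment, the generated C calls back into Ruby via `rb_funcall2`, once for the node start (type symbol, start offset, static flag) and once for the node end (end offset). On the Ruby side this amounts to calls like the following — a paraphrase of the two `rb_funcall2` invocations, not generated code; the offset names are illustrative:

    # start_sym/stop_sym resolve to the methods defined at the end of the file.
    tokenizer.start_node(:COMMENT, ts_offset, is_static)  # 3 args via start_argv
    tokenizer.stop_node(te_offset)                        # 1 arg via stop_argv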
@@ -3463,7 +3485,7 @@ _eof_trans:
    }
    }
    break;
-#line
+#line 3489 "tokenizer.c"
    }
    }
 
@@ -3476,7 +3498,7 @@ _again:
 #line 1 "tokenizer.rl"
    {ts = 0;}
    break;
-#line
+#line 3502 "tokenizer.c"
    }
    }
 
@@ -3492,7 +3514,8 @@ _again:
    }
 
    }
-
+
+#line 529 "tokenizer.rl"
 
    if (curly_idx) curly_idx--;
    stop_argv[0] = INT2FIX(data_length-1);
@@ -3508,30 +3531,21 @@ _again:
   end
 
   def start_node (type, idx, is_static)
-    @
+    @lexer.start_node(type, idx, is_static == 1)
   end
 
   def stop_node (idx)
-    @
+    @lexer.stop_node(idx)
   end
 
   def mark_node (idx)
-    @
-  end
-
-
-  def parse (data)
-    @data = data
-    @ph = JS2::ParserHelper.new(data)
-    self.do_parse(data)
-    return @ph
+    @lexer.mark_node(idx)
   end
 
-  def
-    data
-
-
-    return ph
+  def tokenize! (data, lexer)
+    @data = data
+    @lexer = lexer
+    self.tokenize(data)
   end
 
 end
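The Ruby tail above is the visible API change of this release: the old `parse(data)`, which built a `JS2::ParserHelper` internally, is replaced by `tokenize!(data, lexer)`, and the `start_node`/`stop_node`/`mark_node` callbacks now delegate to a caller-supplied lexer (with the C-side `is_static` int coerced to a boolean). A hedged usage sketch follows; the lexer duck type is inferred from the three delegating methods, and the no-argument constructor is an assumption:

    # Illustrative only: any object responding to these three messages
    # satisfies the delegation seen in start_node/stop_node/mark_node.
    class RecordingLexer
      attr_reader :events

      def initialize
        @events = []
      end

      def start_node(type, idx, is_static)
        @events << [:start, type, idx, is_static]
      end

      def stop_node(idx)
        @events << [:stop, idx]
      end

      def mark_node(idx)
        @events << [:mark, idx]
      end
    end

    lexer = RecordingLexer.new
    JS2::Parser::Tokenizer.new.tokenize!(File.read("basic.js2"), lexer)
    lexer.events.first  # e.g. [:start, :COMMENT, 0, false]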