gherkin 0.0.4-i386-mswin32

Files changed (79)
  1. data/.gitignore +7 -0
  2. data/LICENSE +20 -0
  3. data/README.rdoc +66 -0
  4. data/Rakefile +49 -0
  5. data/VERSION.yml +4 -0
  6. data/bin/gherkin +10 -0
  7. data/cucumber.yml +3 -0
  8. data/features/feature_parser.feature +206 -0
  9. data/features/native_lexer.feature +19 -0
  10. data/features/parser_with_native_lexer.feature +205 -0
  11. data/features/pretty_printer.feature +14 -0
  12. data/features/step_definitions/gherkin_steps.rb +34 -0
  13. data/features/step_definitions/pretty_printer_steps.rb +56 -0
  14. data/features/steps_parser.feature +46 -0
  15. data/features/support/env.rb +33 -0
  16. data/gherkin.gemspec +180 -0
  17. data/java/.gitignore +2 -0
  18. data/java/Gherkin.iml +24 -0
  19. data/java/build.xml +13 -0
  20. data/java/src/gherkin/FixJava.java +34 -0
  21. data/java/src/gherkin/Lexer.java +5 -0
  22. data/java/src/gherkin/LexingError.java +7 -0
  23. data/java/src/gherkin/Listener.java +27 -0
  24. data/java/src/gherkin/ParseError.java +22 -0
  25. data/java/src/gherkin/Parser.java +185 -0
  26. data/java/src/gherkin/lexer/.gitignore +1 -0
  27. data/java/src/gherkin/parser/StateMachineReader.java +62 -0
  28. data/lib/.gitignore +4 -0
  29. data/lib/gherkin.rb +2 -0
  30. data/lib/gherkin/c_lexer.rb +10 -0
  31. data/lib/gherkin/core_ext/array.rb +5 -0
  32. data/lib/gherkin/i18n.yml +535 -0
  33. data/lib/gherkin/i18n_lexer.rb +29 -0
  34. data/lib/gherkin/java_lexer.rb +10 -0
  35. data/lib/gherkin/lexer.rb +43 -0
  36. data/lib/gherkin/parser.rb +19 -0
  37. data/lib/gherkin/parser/meta.txt +4 -0
  38. data/lib/gherkin/parser/root.txt +9 -0
  39. data/lib/gherkin/parser/steps.txt +3 -0
  40. data/lib/gherkin/rb_lexer.rb +10 -0
  41. data/lib/gherkin/rb_lexer/.gitignore +1 -0
  42. data/lib/gherkin/rb_lexer/README.rdoc +8 -0
  43. data/lib/gherkin/rb_parser.rb +117 -0
  44. data/lib/gherkin/tools/pretty_printer.rb +83 -0
  45. data/nativegems.sh +5 -0
  46. data/ragel/i18n/.gitignore +1 -0
  47. data/ragel/lexer.c.rl.erb +401 -0
  48. data/ragel/lexer.java.rl.erb +200 -0
  49. data/ragel/lexer.rb.rl.erb +171 -0
  50. data/ragel/lexer_common.rl.erb +46 -0
  51. data/spec/gherkin/c_lexer_spec.rb +21 -0
  52. data/spec/gherkin/fixtures/1.feature +8 -0
  53. data/spec/gherkin/fixtures/complex.feature +43 -0
  54. data/spec/gherkin/fixtures/i18n_fr.feature +13 -0
  55. data/spec/gherkin/fixtures/i18n_no.feature +6 -0
  56. data/spec/gherkin/fixtures/i18n_zh-CN.feature +8 -0
  57. data/spec/gherkin/fixtures/simple.feature +3 -0
  58. data/spec/gherkin/fixtures/simple_with_comments.feature +7 -0
  59. data/spec/gherkin/fixtures/simple_with_tags.feature +11 -0
  60. data/spec/gherkin/i18n_spec.rb +57 -0
  61. data/spec/gherkin/java_lexer_spec.rb +20 -0
  62. data/spec/gherkin/parser_spec.rb +28 -0
  63. data/spec/gherkin/rb_lexer_spec.rb +18 -0
  64. data/spec/gherkin/sexp_recorder.rb +29 -0
  65. data/spec/gherkin/shared/lexer_spec.rb +433 -0
  66. data/spec/gherkin/shared/py_string_spec.rb +124 -0
  67. data/spec/gherkin/shared/table_spec.rb +97 -0
  68. data/spec/gherkin/shared/tags_spec.rb +50 -0
  69. data/spec/spec_helper.rb +53 -0
  70. data/tasks/bench.rake +193 -0
  71. data/tasks/bench/feature_builder.rb +49 -0
  72. data/tasks/bench/generated/.gitignore +1 -0
  73. data/tasks/bench/null_listener.rb +4 -0
  74. data/tasks/compile.rake +70 -0
  75. data/tasks/cucumber.rake +20 -0
  76. data/tasks/ragel_task.rb +83 -0
  77. data/tasks/rdoc.rake +12 -0
  78. data/tasks/rspec.rake +15 -0
  79. metadata +214 -0

data/ragel/lexer.java.rl.erb
@@ -0,0 +1,200 @@
+ package gherkin.lexer;
+
+ import java.util.List;
+ import java.util.ArrayList;
+ import java.util.regex.Pattern;
+ import gherkin.Lexer;
+ import gherkin.Listener;
+ import gherkin.LexingError;
+
+ public class <%= @i18n.capitalize %> implements Lexer {
+   %%{
+     machine lexer;
+     alphtype byte;
+
+     action begin_content {
+       contentStart = p;
+       currentLine = lineNumber;
+     }
+
+     action start_pystring {
+       currentLine = lineNumber;
+       startCol = p - lastNewline;
+     }
+
+     action begin_pystring_content {
+       contentStart = p;
+     }
+
+     action store_pystring_content {
+       String con = unindent(startCol, substring(data, contentStart, nextKeywordStart-1).replaceFirst("(\\r?\\n)?( )*\\Z", ""));
+       listener.py_string(con, currentLine);
+     }
+
+     action store_feature_content {
+       String con = multilineStrip(keywordContent(data, p, eof, nextKeywordStart, contentStart).trim());
+       listener.feature(keyword, con, currentLine);
+       if(nextKeywordStart != -1) p = nextKeywordStart - 1;
+       nextKeywordStart = -1;
+     }
+
+     action store_background_content {
+       String con = multilineStrip(keywordContent(data, p, eof, nextKeywordStart, contentStart));
+       listener.background(keyword, con, currentLine);
+       if(nextKeywordStart != -1) p = nextKeywordStart - 1;
+       nextKeywordStart = -1;
+     }
+
+     action store_scenario_content {
+       String con = multilineStrip(keywordContent(data, p, eof, nextKeywordStart, contentStart));
+       listener.scenario(keyword, con, currentLine);
+       if(nextKeywordStart != -1) p = nextKeywordStart - 1;
+       nextKeywordStart = -1;
+     }
+
+     action store_scenario_outline_content {
+       String con = multilineStrip(keywordContent(data, p, eof, nextKeywordStart, contentStart));
+       listener.scenario_outline(keyword, con, currentLine);
+       if(nextKeywordStart != -1) p = nextKeywordStart - 1;
+       nextKeywordStart = -1;
+     }
+
+     action store_examples_content {
+       String con = multilineStrip(keywordContent(data, p, eof, nextKeywordStart, contentStart));
+       listener.examples(keyword, con, currentLine);
+       if(nextKeywordStart != -1) p = nextKeywordStart - 1;
+       nextKeywordStart = -1;
+     }
+
+     action store_step_content {
+       listener.step(keyword, substring(data, contentStart, p).trim(), currentLine);
+     }
+
+     action store_comment_content {
+       listener.comment(substring(data, contentStart, p).trim(), lineNumber);
+       keywordStart = -1;
+     }
+
+     action store_tag_content {
+       listener.tag(substring(data, contentStart, p).trim(), currentLine);
+       keywordStart = -1;
+     }
+
+     action inc_line_number {
+       lineNumber++;
+     }
+
+     action last_newline {
+       lastNewline = p + 1;
+     }
+
+     action start_keyword {
+       if(keywordStart == -1) keywordStart = p;
+     }
+
+     action end_keyword {
+       keyword = substring(data, keywordStart, p).replaceFirst(":$","").trim();
+       keywordStart = -1;
+     }
+
+     action next_keyword_start {
+       nextKeywordStart = p;
+     }
+
+     action start_table {
+       p = p - 1;
+       rows = new ArrayList<List<String>>();
+       currentLine = lineNumber;
+     }
+
+     action start_row {
+       currentRow = new ArrayList<String>();
+     }
+
+     action begin_cell_content {
+       contentStart = p;
+     }
+
+     action store_cell_content {
+       currentRow.add(substring(data, contentStart, p).trim());
+     }
+
+     action store_row {
+       rows.add(currentRow);
+     }
+
+     action store_table {
+       if(!rows.isEmpty()) {
+         listener.table(rows, currentLine);
+       }
+     }
+
+     action end_feature {
+       if(cs < lexer_first_final) {
+         String content = currentLineContent(data, lastNewline);
+         throw new LexingError("Lexing error on line " + lineNumber);
+       }
+     }
+
+     include lexer_common "lexer_common.<%= @i18n %>.rl";
+   }%%
+
+   private final Listener listener;
+
+   public <%= @i18n.capitalize %>(Listener listener) {
+     this.listener = listener;
+   }
+
+   %% write data noerror;
+
+   public void scan(CharSequence inputSequence) {
+     String input = inputSequence.toString() + "\n%_FEATURE_END_%";
+     byte[] data = input.getBytes();
+     int cs, p = 0, pe = data.length;
+     int eof = pe;
+
+     int lineNumber = 1;
+     int lastNewline = 0;
+
+     int contentStart = -1;
+     int currentLine = -1;
+     int startCol = -1;
+     int nextKeywordStart = -1;
+     int keywordStart = -1;
+     String keyword = null;
+     List<List<String>> rows = null;
+     List<String> currentRow = null;
+
+     %% write init;
+     %% write exec;
+   }
+
+   private String keywordContent(byte[] data, int p, int eof, int nextKeywordStart, int contentStart) {
+     int endPoint = (nextKeywordStart == -1 || (p == eof)) ? p : nextKeywordStart;
+     return substring(data, contentStart, endPoint);
+   }
+
+   private String multilineStrip(String text) {
+     StringBuffer result = new StringBuffer();
+     for(String s : text.split("\n")) {
+       result.append(s.trim()).append("\n");
+     }
+     return result.toString().trim();
+   }
+
+   private String unindent(int startCol, String text) {
+     return Pattern.compile("^ {0," + startCol + "}", Pattern.MULTILINE).matcher(text).replaceAll("");
+   }
+
+   private String currentLineContent(byte[] data, int lastNewline) {
+     return substring(data, lastNewline, data.length).trim();
+   }
+
+   private String substring(byte[] data, int start, int end) {
+     try {
+       return new String(data, start, end-start, "utf-8");
+     } catch(java.io.UnsupportedEncodingException e) {
+       throw new RuntimeException("Internal error", e);
+     }
+   }
+ }

data/ragel/lexer.rb.rl.erb
@@ -0,0 +1,171 @@
+ require 'gherkin/core_ext/array'
+
+ module Gherkin
+   module RbLexer
+     class <%= @i18n.capitalize %> #:nodoc:
+       %%{
+         machine lexer;
+
+         action begin_content {
+           @content_start = p
+           @current_line = @line_number
+         }
+
+         action start_pystring {
+           @current_line = @line_number
+           @start_col = p - @last_newline
+         }
+
+         action begin_pystring_content {
+           @content_start = p
+         }
+
+         action store_pystring_content {
+           con = unindent(@start_col, data[@content_start...@next_keyword_start-1].utf8_pack("c*").sub(/(\r?\n)?( )*\Z/, ''))
+           @listener.py_string(con, @current_line)
+         }
+
+         action store_feature_content {
+           store_keyword_content(:feature, data, p, eof) { |con| multiline_strip(con) }
+           p = @next_keyword_start - 1 if @next_keyword_start
+           @next_keyword_start = nil
+         }
+
+         action store_background_content {
+           store_keyword_content(:background, data, p, eof) { |con| multiline_strip(con) }
+           p = @next_keyword_start - 1 if @next_keyword_start
+           @next_keyword_start = nil
+         }
+
+         action store_scenario_content {
+           store_keyword_content(:scenario, data, p, eof) { |con| multiline_strip(con) }
+           p = @next_keyword_start - 1 if @next_keyword_start
+           @next_keyword_start = nil
+         }
+
+         action store_scenario_outline_content {
+           store_keyword_content(:scenario_outline, data, p, eof) { |con| multiline_strip(con) }
+           p = @next_keyword_start - 1 if @next_keyword_start
+           @next_keyword_start = nil
+         }
+
+         action store_examples_content {
+           store_keyword_content(:examples, data, p, eof) { |con| multiline_strip(con) }
+           p = @next_keyword_start - 1 if @next_keyword_start
+           @next_keyword_start = nil
+         }
+
+         action store_step_content {
+           con = data[@content_start...p].utf8_pack("c*").strip
+           @listener.step(@keyword, con, @current_line)
+         }
+
+         action store_comment_content {
+           con = data[@content_start...p].utf8_pack("c*").strip
+           @listener.comment(con, @line_number)
+           @keyword_start = nil
+         }
+
+         action store_tag_content {
+           con = data[@content_start...p].utf8_pack("c*").strip
+           @listener.tag(con, @current_line)
+           @keyword_start = nil
+         }
+
+         action inc_line_number {
+           @line_number += 1
+         }
+
+         action last_newline {
+           @last_newline = p + 1
+         }
+
+         action start_keyword {
+           @keyword_start ||= p
+         }
+
+         action end_keyword {
+           @keyword = data[@keyword_start...p].utf8_pack("c*").sub(/:$/,'').strip
+           @keyword_start = nil
+         }
+
+         action next_keyword_start {
+           @next_keyword_start = p
+         }
+
+         action start_table {
+           p = p - 1
+           @rows = []
+           @current_line = @line_number
+         }
+
+         action start_row {
+           current_row = []
+         }
+
+         action begin_cell_content {
+           @content_start = p
+         }
+
+         action store_cell_content {
+           con = data[@content_start...p].utf8_pack("c*").strip
+           current_row << con
+         }
+
+         action store_row {
+           @rows << current_row
+         }
+
+         action store_table {
+           if @rows.size != 0
+             @listener.table(@rows, @current_line)
+           end
+         }
+
+         action end_feature {
+           if cs < lexer_first_final
+             content = current_line_content(data, p)
+             raise Lexer::LexingError.new("Lexing error on line %d: '%s'." % [@line_number, content])
+           end
+         }
+
+         include lexer_common "lexer_common.<%= @i18n %>.rl";
+       }%%
+
+       def initialize(listener)
+         @listener = listener
+         %% write data;
+       end
+
+       def scan(data)
+         data = (data + "\n%_FEATURE_END_%").unpack("c*") # Explicit EOF simplifies things considerably
+         eof = pe = data.length
+
+         @line_number = 1
+         @last_newline = 0
+
+         %% write init;
+         %% write exec;
+       end
+
+       def multiline_strip(text)
+         text.split("\n").map{|s| s.strip}.join("\n").strip
+       end
+
+       def unindent(startcol, text)
+         text.gsub(/^ {0,#{startcol}}/, "")
+       end
+
+       def store_keyword_content(event, data, p, eof)
+         end_point = (!@next_keyword_start or (p == eof)) ? p : @next_keyword_start
+         con = yield data[@content_start...end_point].utf8_pack("c*")
+         @listener.send(event, @keyword, con, @current_line)
+       end
+
+       def current_line_content(data, p)
+         rest = data[@last_newline..-1]
+         rest[0..rest.index(10)||-1].utf8_pack("c*").strip
+       end
+     end
+   end
+ end

data/ragel/lexer_common.rl.erb
@@ -0,0 +1,46 @@
+ %%{
+   machine lexer_common;
+
+   # Language specific
+   I18N_Feature         = <%= keywords['feature'] %> >start_keyword %end_keyword;
+   I18N_Background      = <%= keywords['background'] %> >start_keyword %end_keyword;
+   I18N_ScenarioOutline = <%= keywords['scenario_outline'] %> >start_keyword %end_keyword;
+   I18N_Scenario        = <%= keywords['scenario'] %> >start_keyword %end_keyword;
+   I18N_Step            = (<%= keywords['given'] %> | <%= keywords['when'] %> | <%= keywords['and'] %> | <%= keywords['then'] %> | <%= keywords['but'] %>) >start_keyword %end_keyword;
+   I18N_Examples        = <%= keywords['examples'] %> >start_keyword %end_keyword;
+
+   EOF = '%_FEATURE_END_%'; # Explicit EOF added before scanning begins
+   EOL = ('\r'? '\n') @inc_line_number @last_newline;
+
+   FeatureHeadingEnd         = EOL+ space* (I18N_Background | I18N_Scenario | I18N_ScenarioOutline | '@' | '#' | EOF) >next_keyword_start;
+   ScenarioHeadingEnd        = EOL+ space* ( I18N_Scenario | I18N_ScenarioOutline | I18N_Step | '@' | '#' | EOF ) >next_keyword_start;
+   BackgroundHeadingEnd      = EOL+ space* ( I18N_Scenario | I18N_ScenarioOutline | I18N_Step | '@' | '#' | EOF ) >next_keyword_start;
+   ScenarioOutlineHeadingEnd = EOL+ space* ( I18N_Scenario | I18N_Step | '@' | '#' | EOF ) >next_keyword_start;
+   ExamplesHeadingEnd        = EOL+ space* '|' >next_keyword_start;
+
+   FeatureHeading         = space* I18N_Feature %begin_content ^FeatureHeadingEnd* :>> FeatureHeadingEnd @store_feature_content;
+   BackgroundHeading      = space* I18N_Background %begin_content ^BackgroundHeadingEnd* :>> BackgroundHeadingEnd @store_background_content;
+   ScenarioHeading        = space* I18N_Scenario %begin_content ^ScenarioHeadingEnd* :>> ScenarioHeadingEnd @store_scenario_content;
+   ScenarioOutlineHeading = space* I18N_ScenarioOutline %begin_content ^ScenarioOutlineHeadingEnd* :>> ScenarioOutlineHeadingEnd @store_scenario_outline_content;
+   ExamplesHeading        = space* I18N_Examples %begin_content ^ExamplesHeadingEnd* :>> ExamplesHeadingEnd @store_examples_content;
+
+   Step    = space* I18N_Step %begin_content ^EOL+ %store_step_content EOL+;
+   Comment = space* '#' >begin_content ^EOL* %store_comment_content EOL+;
+
+   Tag  = ( '@' [^@\r\n\t ]+ >begin_content ) %store_tag_content;
+   Tags = space* (Tag space*)+ EOL+;
+
+   StartTable = space* '|' >start_table;
+   EndTable   = EOL space* ^('|') >next_keyword_start;
+   Cell       = '|' (any - '|')* >begin_cell_content %store_cell_content;
+   Row        = space* Cell* >start_row '|' :>> (space* EOL+ space*) %store_row;
+   Table      = StartTable :>> Row+ %store_table <: EndTable?;
+
+   StartPyString = '"""' >start_pystring space* :>> EOL;
+   EndPyString   = (space* '"""') >next_keyword_start;
+   PyString      = space* StartPyString %begin_pystring_content (^EOL | EOL)* :>> EndPyString %store_pystring_content space* EOL+;
+
+   Tokens = (space | EOL)* (Tags | Comment | FeatureHeading | BackgroundHeading | ScenarioHeading | ScenarioOutlineHeading | ExamplesHeading | Step | Table | PyString)* (space | EOL)* EOF;
+
+   main := Tokens %end_feature @!end_feature;
+ }%%
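
The .rl.erb files above are ERB templates rather than finished Ragel sources: the build renders them once per language before Ragel compiles them (the gem's own build step lives in data/tasks/ragel_task.rb, which this diff does not show). Below is a minimal sketch of that rendering step, assuming lib/gherkin/i18n.yml maps each language code to keyword entries that are already valid Ragel expressions; the YAML layout and output paths are assumptions for illustration, not the gem's actual build code.

require 'erb'
require 'yaml'

i18n     = 'en'
keywords = YAML.load_file('lib/gherkin/i18n.yml')[i18n]  # assumed layout: 'en' => { 'feature' => "'Feature'", ... }

# Render the shared grammar; <%= keywords['feature'] %> and friends resolve
# against the local `keywords` captured by `binding`.
common = ERB.new(File.read('ragel/lexer_common.rl.erb')).result(binding)
File.open("ragel/i18n/lexer_common.#{i18n}.rl", 'w') { |f| f.write(common) }

# The lexer templates additionally expect @i18n (see <%= @i18n %> above).
@i18n = i18n
rb_lexer = ERB.new(File.read('ragel/lexer.rb.rl.erb')).result(binding)
File.open("ragel/i18n/#{i18n}_lexer.rb.rl", 'w') { |f| f.write(rb_lexer) }  # output name assumed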

data/spec/gherkin/c_lexer_spec.rb
@@ -0,0 +1,21 @@
+ #encoding: utf-8
+ unless defined?(JRUBY_VERSION)
+   require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
+   require 'gherkin/c_lexer'
+
+   module Gherkin
+     module Lexer
+       describe "C Lexer" do
+         before do
+           @listener = Gherkin::SexpRecorder.new
+           @lexer = Gherkin::CLexer['en'].new(@listener)
+         end
+
+         it_should_behave_like "a Gherkin lexer"
+         it_should_behave_like "a Gherkin lexer lexing tags"
+         it_should_behave_like "a Gherkin lexer lexing py_strings"
+         it_should_behave_like "a Gherkin lexer lexing tables"
+       end
+     end
+   end
+ end

data/spec/gherkin/fixtures/1.feature
@@ -0,0 +1,8 @@
+ Feature: Logging in
+   So that I can be myself
+   # Comment
+   Scenario: Anonymous user can get a login form.
+     Scenery here
+
+   @tag
+   Scenario: Another one
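
For context on how these generated lexers are meant to be driven: scan emits one event per recognized construct to a listener, so parsing a feature file like the fixture above only requires an object that responds to those events. A minimal sketch follows, assuming Gherkin::RbLexer['en'] looks up the generated pure-Ruby lexer the same way the spec's Gherkin::CLexer['en'] does; the DebugListener class is hypothetical, while the event names mirror the callbacks visible in the lexer templates above.

require 'gherkin/rb_lexer'  # ships in this gem as lib/gherkin/rb_lexer.rb

# Hypothetical catch-all listener: prints every event the lexer emits
# (feature, background, scenario, scenario_outline, examples, step,
# comment, tag, table, py_string).
class DebugListener
  def method_missing(event, *args)
    puts "#{event}: #{args.inspect}"
  end
end

lexer = Gherkin::RbLexer['en'].new(DebugListener.new)  # lookup assumed, mirroring Gherkin::CLexer['en'] in the spec above
lexer.scan(File.read('spec/gherkin/fixtures/1.feature'))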