RbYAML 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66)
  1. data/LICENSE +19 -0
  2. data/README +31 -0
  3. data/lib/rbyaml.rb +378 -0
  4. data/lib/rbyaml/composer.rb +189 -0
  5. data/lib/rbyaml/constructor.rb +374 -0
  6. data/lib/rbyaml/detector.rb +44 -0
  7. data/lib/rbyaml/dumper.rb +40 -0
  8. data/lib/rbyaml/emitter.rb +1116 -0
  9. data/lib/rbyaml/error.rb +81 -0
  10. data/lib/rbyaml/events.rb +92 -0
  11. data/lib/rbyaml/loader.rb +49 -0
  12. data/lib/rbyaml/nodes.rb +69 -0
  13. data/lib/rbyaml/parser.rb +488 -0
  14. data/lib/rbyaml/reader.rb +127 -0
  15. data/lib/rbyaml/representer.rb +183 -0
  16. data/lib/rbyaml/scanner.rb +1258 -0
  17. data/lib/rbyaml/serializer.rb +120 -0
  18. data/lib/rbyaml/test.rb +56 -0
  19. data/lib/rbyaml/tokens.rb +163 -0
  20. data/lib/rbyaml/yaml.rb +143 -0
  21. data/test/test_rbyaml.rb +18 -0
  22. data/test/yaml/gems.yml +130951 -0
  23. data/test/yaml/gems2.yml +113 -0
  24. data/test/yaml/test1.yml +3 -0
  25. data/test/yaml/test10.yml +8 -0
  26. data/test/yaml/test12.yml +8 -0
  27. data/test/yaml/test13.yml +4 -0
  28. data/test/yaml/test14.yml +4 -0
  29. data/test/yaml/test15.yml +8 -0
  30. data/test/yaml/test16.yml +7 -0
  31. data/test/yaml/test18.yml +6 -0
  32. data/test/yaml/test19.yml +5 -0
  33. data/test/yaml/test2.yml +3 -0
  34. data/test/yaml/test20.yml +6 -0
  35. data/test/yaml/test21.yml +4 -0
  36. data/test/yaml/test22.yml +4 -0
  37. data/test/yaml/test23.yml +13 -0
  38. data/test/yaml/test24.yml +14 -0
  39. data/test/yaml/test25.yml +7 -0
  40. data/test/yaml/test26.yml +7 -0
  41. data/test/yaml/test27.yml +29 -0
  42. data/test/yaml/test28.yml +26 -0
  43. data/test/yaml/test29.yml +13 -0
  44. data/test/yaml/test3.yml +8 -0
  45. data/test/yaml/test30.yml +7 -0
  46. data/test/yaml/test31.yml +2 -0
  47. data/test/yaml/test32.yml +13 -0
  48. data/test/yaml/test33.yml +2 -0
  49. data/test/yaml/test34.yml +8 -0
  50. data/test/yaml/test35.yml +4 -0
  51. data/test/yaml/test36.yml +8 -0
  52. data/test/yaml/test37.yml +2 -0
  53. data/test/yaml/test38.yml +8 -0
  54. data/test/yaml/test39.yml +2 -0
  55. data/test/yaml/test4.yml +8 -0
  56. data/test/yaml/test40.yml +3 -0
  57. data/test/yaml/test41.yml +5 -0
  58. data/test/yaml/test42.yml +12 -0
  59. data/test/yaml/test43.yml +15 -0
  60. data/test/yaml/test44.yml +23 -0
  61. data/test/yaml/test5.yml +3 -0
  62. data/test/yaml/test6.yml +5 -0
  63. data/test/yaml/test7.yml +10 -0
  64. data/test/yaml/test8.yml +10 -0
  65. data/test/yaml/test9.yml +8 -0
  66. metadata +111 -0
data/lib/rbyaml/error.rb
@@ -0,0 +1,81 @@
+
+ module RbYAML
+   class Mark
+     attr_reader :name,:index,:line,:column,:buffer,:pointer
+     def initialize(name, index, line, column, buffer, pointer)
+       @name = name
+       @index = index
+       @line = line
+       @column = column
+       @buffer = buffer
+       @pointer = pointer
+     end
+
+     def get_snippet(indent=4, max_length=75)
+       return nil if @buffer.nil?
+       head = ""
+       start = @pointer
+       while start > 0 && !"\0\r\n\x85".include?(@buffer[start-1])
+         start -= 1
+         if @pointer-start > max_length/2-1
+           head = " ... "
+           start += 5
+           break
+         end
+       end
+       tail = ""
+       tend = @pointer
+       while tend < @buffer.length && !"\0\r\n\x85".include?(@buffer[tend])
+         tend += 1
+         if tend-@pointer > max_length/2-1
+           tail = " ... "
+           tend -= 5
+           break
+         end
+       end
+       snippet = @buffer[start..tend]
+       ' ' * indent + "#{head}#{snippet}#{tail}\n" + ' '*(indent+@pointer-start+head.length) + ' '
+     end
+
+     def to_s
+       snippet = get_snippet()
+       where = " in \"#{@name}\", line #{@line+1}, column #{@column+1}"
+       if snippet
+         where << ":\n" << snippet
+       end
+     end
+   end
+
+   class YAMLError < StandardError
+   end
+
+   class TypeError < YAMLError
+   end
+
+   class MarkedYAMLError < YAMLError
+     def initialize(context=nil, context_mark=nil, problem=nil, problem_mark=nil, note=nil)
+       super()
+       @context = context
+       @context_mark = context_mark
+       @problem = problem
+       @problem_mark = problem_mark
+       @note = note
+     end
+
+     def to_s
+       lines = []
+
+       lines << @context if @context
+       if @context_mark && (@problem.nil? || @problem_mark.nil? ||
+           @context_mark.name != @problem_mark.name ||
+           @context_mark.line != @problem_mark.line ||
+           @context_mark.column != @problem_mark.column)
+         lines << @context_mark.to_s
+       end
+       lines << @problem if @problem
+       lines << @problem_mark.to_s if @problem_mark
+       lines << @note if @note
+       lines.join("\n")
+     end
+   end
+ end
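The Mark and MarkedYAMLError classes above carry the position information used in parse-error messages. A minimal sketch of how the two fit together, with a made-up buffer and mark purely for illustration:

    require 'rbyaml/error'

    buffer = "key: [1, 2\n"
    mark   = RbYAML::Mark.new("example.yml", 9, 0, 9, buffer, 9)
    error  = RbYAML::MarkedYAMLError.new("while parsing a flow sequence", mark,
                                         "expected ',' or ']'", mark)
    puts error   # prints the context, the problem, and a snippet of the offending line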
data/lib/rbyaml/events.rb
@@ -0,0 +1,92 @@
+
+ module RbYAML
+   class Event
+     attr_reader :start_mark, :end_mark
+     def initialize(start_mark=nil,end_mark=nil)
+       @start_mark = start_mark
+       @end_mark = end_mark
+     end
+
+     def to_s
+       attributes = ["@anchor","@tag","@value"] & self.instance_variables
+       args = attributes.collect {|val| "#{val[1..-1]}=" + eval("#{val}").to_s}.join(", ")
+       "#{self.class.name}(#{args})"
+     end
+   end
+
+   class NodeEvent < Event
+     attr_reader :anchor
+     def initialize(anchor, start_mark=nil, end_mark=nil)
+       super(start_mark,end_mark)
+       @anchor = anchor
+     end
+   end
+
+   class CollectionStartEvent < NodeEvent
+     attr_reader :tag, :flow_style
+     def initialize(anchor,tag,start_mark=nil, end_mark=nil,flow_style=nil)
+       super(anchor,start_mark,end_mark)
+       @tag = tag
+       @flow_style = flow_style
+     end
+   end
+
+   class CollectionEndEvent < Event
+   end
+
+   class StreamStartEvent < Event
+     attr_reader :encoding
+     def initialize(start_mark=nil,end_mark=nil,encoding=nil)
+       super(start_mark,end_mark)
+       @encoding = encoding
+     end
+   end
+
+   class StreamEndEvent < Event
+   end
+
+   class DocumentStartEvent < Event
+     attr_reader :explicit, :version, :tags
+     def initialize(start_mark=nil,end_mark=nil,explicit=nil,version=nil,tags=nil)
+       super(start_mark,end_mark)
+       @explicit = explicit
+       @version = version
+       @tags = tags
+     end
+   end
+
+   class DocumentEndEvent < Event
+     attr_reader :explicit
+     def initialize(start_mark=nil,end_mark=nil,explicit=nil)
+       super(start_mark,end_mark)
+       @explicit = explicit
+     end
+   end
+
+   class AliasEvent < NodeEvent
+   end
+
+   class ScalarEvent < NodeEvent
+     attr_reader :tag, :style, :value, :implicit
+     def initialize(anchor,tag,implicit,value,start_mark=nil, end_mark=nil,style=nil)
+       super(anchor,start_mark,end_mark)
+       @tag = tag
+       @style = style
+       @value = value
+       @implicit = implicit
+     end
+   end
+
+   class SequenceStartEvent < CollectionStartEvent
+   end
+
+   class SequenceEndEvent < CollectionEndEvent
+   end
+
+   class MappingStartEvent < CollectionStartEvent
+   end
+
+   class MappingEndEvent < CollectionEndEvent
+   end
+ end
+
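These event classes form the stream representation that sits between the parser and the composer/emitter. As a hand-written illustration (not output captured from the library), a document like "- foo" corresponds to an event sequence along these lines:

    require 'rbyaml/events'

    events = [
      RbYAML::StreamStartEvent.new,
      RbYAML::DocumentStartEvent.new(nil, nil, false),
      RbYAML::SequenceStartEvent.new(nil, nil),        # anchor, tag
      RbYAML::ScalarEvent.new(nil, nil, true, "foo"),
      RbYAML::SequenceEndEvent.new,
      RbYAML::DocumentEndEvent.new,
      RbYAML::StreamEndEvent.new
    ]
    events.each { |e| puts e }   # Event#to_s prints the class name plus anchor/tag/value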
data/lib/rbyaml/loader.rb
@@ -0,0 +1,49 @@
+ # This is a more or less straight translation of PyYAML3000 to Ruby
+
+ require 'rbyaml/reader'
+ require 'rbyaml/scanner'
+ require 'rbyaml/parser'
+ require 'rbyaml/composer'
+ require 'rbyaml/constructor'
+ require 'rbyaml/detector'
+
+ module RbYAML
+   class CommonLoader
+     include Reader, Scanner, Parser
+
+     def initialize(stream)
+       super()
+       initialize_reader(stream)
+       initialize_scanner
+       initialize_parser
+     end
+   end
+
+   class BaseLoader < CommonLoader
+     include BaseComposer, BaseConstructor, BaseDetector
+     def initialize(stream)
+       super
+       initialize_composer
+       initialize_constructor
+     end
+   end
+
+   class SafeLoader < CommonLoader
+     include Composer, SafeConstructor, Detector
+     def initialize(stream)
+       super
+       initialize_composer
+       initialize_constructor
+     end
+   end
+
+   class Loader < CommonLoader
+     include Composer, Constructor, Detector
+     def initialize(stream)
+       super
+       initialize_composer
+       initialize_constructor
+     end
+   end
+ end
+
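Each loader class simply mixes the Reader, Scanner, Parser, Composer and Constructor modules into a single object, so the whole pipeline hangs off one instance. A hedged sketch of direct use, assuming the Reader module accepts a plain String as the stream (reader.rb is not shown in this hunk):

    require 'rbyaml/loader'

    loader = RbYAML::Loader.new("--- [1, 2, 3]\n")
    loader.each_event { |event| puts event }   # each_event is provided by the Parser module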
data/lib/rbyaml/nodes.rb
@@ -0,0 +1,69 @@
+
+ module RbYAML
+   class Node
+     attr_accessor :tag, :value, :start_mark, :end_mark
+
+     def initialize(tag, value, start_mark, end_mark)
+       @tag = tag
+       @value = value
+       @start_mark = start_mark
+       @end_mark = end_mark
+     end
+
+     def to_s
+       value = @value
+       if Array === value
+         if value.empty?
+           value = "<empty>"
+         elsif value.length == 1
+           value = "<1 item>"
+         else
+           value = "<#{value.length} items>"
+         end
+       else
+         if value.length > 75
+           value = value[0..70].to_s+"..."
+         else
+           value = value.to_s
+         end
+       end
+
+       "#{self.class.name}(tag=#{@tag}, value=#{value})"
+     end
+   end
+
+   class ScalarNode < Node
+     def tid
+       "scalar"
+     end
+
+     attr_accessor :style
+
+     def initialize(tag, value,start_mark=nil,end_mark=nil,style=nil)
+       super(tag,value,start_mark,end_mark)
+       @style = style
+     end
+   end
+
+   class CollectionNode < Node
+     attr_accessor :flow_style
+
+     def initialize(tag, value,start_mark=nil,end_mark=nil,flow_style=nil)
+       super(tag,value,start_mark,end_mark)
+       @flow_style = flow_style
+     end
+   end
+
+   class SequenceNode < CollectionNode
+     def tid
+       "sequence"
+     end
+   end
+
+   class MappingNode < CollectionNode
+     def tid
+       "mapping"
+     end
+   end
+ end
+
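Nodes are the composer's representation graph: every node pairs a resolved tag with a value, and the subclasses only add a style or flow_style attribute plus a tid. For example, built by hand with marks omitted:

    require 'rbyaml/nodes'

    str = RbYAML::ScalarNode.new("tag:yaml.org,2002:str", "foo")
    seq = RbYAML::SequenceNode.new("tag:yaml.org,2002:seq", [str])
    puts seq.tid   # => "sequence"
    puts seq       # => RbYAML::SequenceNode(tag=tag:yaml.org,2002:seq, value=<1 item>)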
data/lib/rbyaml/parser.rb
@@ -0,0 +1,488 @@
+
+ # YAML can be parsed by an LL(1) parser!
+ #
+ # We use the following production rules:
+ # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+ # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END?
+ # implicit_document ::= block_node DOCUMENT-END?
+ # block_node ::= ALIAS | properties? block_content
+ # flow_node ::= ALIAS | properties? flow_content
+ # properties ::= TAG ANCHOR? | ANCHOR TAG?
+ # block_content ::= block_collection | flow_collection | SCALAR
+ # flow_content ::= flow_collection | SCALAR
+ # block_collection ::= block_sequence | block_mapping
+ # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+ # block_mapping ::= BLOCK-MAPPING-START ((KEY block_node_or_indentless_sequence?)? (VALUE block_node_or_indentless_sequence?)?)* BLOCK-END
+ # block_node_or_indentless_sequence ::= ALIAS | properties? (block_content | indentless_block_sequence)
+ # indentless_block_sequence ::= (BLOCK-ENTRY block_node?)+
+ # flow_collection ::= flow_sequence | flow_mapping
+ # flow_sequence ::= FLOW-SEQUENCE-START (flow_sequence_entry FLOW-ENTRY)* flow_sequence_entry? FLOW-SEQUENCE-END
+ # flow_mapping ::= FLOW-MAPPING-START (flow_mapping_entry FLOW-ENTRY)* flow_mapping_entry? FLOW-MAPPING-END
+ # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+ # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+
+ # TODO: support for BOM within a stream.
+ # stream ::= (BOM? implicit_document)? (BOM? explicit_document)* STREAM-END
+
+ # FIRST sets:
+ # stream: { STREAM-START }
+ # explicit_document: { DIRECTIVE DOCUMENT-START }
+ # implicit_document: FIRST(block_node)
+ # block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
+ # flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
+ # block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+ # flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+ # block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
+ # flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+ # block_sequence: { BLOCK-SEQUENCE-START }
+ # block_mapping: { BLOCK-MAPPING-START }
+ # block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
+ # indentless_sequence: { ENTRY }
+ # flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+ # flow_sequence: { FLOW-SEQUENCE-START }
+ # flow_mapping: { FLOW-MAPPING-START }
+ # flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+ # flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+
+ require 'rbyaml/error'
+ require 'rbyaml/tokens'
+ require 'rbyaml/events'
+ require 'rbyaml/scanner'
+
+ module RbYAML
+   class ParserError < MarkedYAMLError
+   end
+
+   module Parser
+
+     DEFAULT_TAGS = {
+       '!' => '!',
+       '!!' => 'tag:yaml.org,2002:'
+     }
+
+     def initialize_parser
+       @current_event = nil
+       @yaml_version = nil
+       @events = nil
+       @working_events = nil
+       @tag_handles = { }
+     end
+
+     def check_event(*choices)
+       init_events
+       @current_event = @working_events.shift if @current_event.nil?
+       if @current_event
+         return true if choices.empty?
+         for choice in choices
+           return true if choice === @current_event
+         end
+       end
+       false
+     end
+
+     def peek_event
+       init_events
+       @current_event = @working_events.shift if @current_event.nil?
+       @current_event
+     end
+
+     def get_event
+       init_events
+       @current_event = @working_events.shift if @current_event.nil?
+       value = @current_event
+       @current_event = nil
+       value
+     end
+
+     def init_events
+       @events ||= parse_stream
+       @working_events ||= @events
+     end
+
+     def each_event(&block)
+       init_events
+       @events.each(&block)
+     end
+
+     def parse_stream
+       # STREAM-START implicit_document? explicit_document* STREAM-END
+
+       # Parse start of stream.
+       events = []
+       token = get_token
+       events << StreamStartEvent.new(token.start_mark, token.end_mark,token.encoding)
+
+       # Parse implicit document.
+       unless check_token(DirectiveToken, DocumentStartToken,StreamEndToken)
+         @tag_handles = DEFAULT_TAGS
+         token = peek_token
+         start_mark = end_mark = token.start_mark
+         events << DocumentStartEvent.new(start_mark, end_mark,false)
+         events += parse_block_node
+         token = peek_token
+         start_mark = end_mark = token.start_mark
+         explicit = false
+         while check_token(DocumentEndToken)
+           token = get_token
+           end_mark = token.end_mark
+           explicit = true
+         end
+         events << DocumentEndEvent.new(start_mark, end_mark,explicit)
+       end
+
+       # Parse explicit documents.
+       while !check_token(StreamEndToken)
+         token = peek_token
+         start_mark = token.start_mark
+         version, tags = process_directives
+         raise ParserError.new(nil, nil,"expected '<document start>', but found #{peek_token.id}",peek_token.start_mark) unless check_token(DocumentStartToken)
+         token = get_token
+         end_mark = token.end_mark
+         events << DocumentStartEvent.new(start_mark, end_mark,true,version,tags)
+         if check_token(DirectiveToken,DocumentStartToken, DocumentEndToken, StreamEndToken)
+           events << process_empty_scalar(token.end_mark)
+         else
+           events += parse_block_node
+         end
+         token = peek_token
+         start_mark = end_mark = token.start_mark
+         explicit = false
+         while check_token(DocumentEndToken)
+           token = get_token
+           end_mark = token.end_mark
+           explicit=true
+         end
+         events << DocumentEndEvent.new(start_mark, end_mark,explicit)
+       end
+       # Parse end of stream.
+       token = get_token
+       events << StreamEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
+     def process_directives
+       # DIRECTIVE*
+       while check_token(DirectiveToken)
+         token = get_token
+         if token.name == "YAML"
+           raise ParserError.new(nil, nil,"found duplicate YAML directive", token.start_mark) if !@yaml_version.nil?
+           major, minor = token.value[0].to_i, token.value[1].to_i
+           raise ParserError.new(nil,nil,"found incompatible YAML document (version 1.* is required)",token.start_mark) if major != 1
+           @yaml_version = [major,minor]
+         elsif token.name == "TAG"
+           handle, prefix = token.value
+           raise ParserError.new(nil,nil,"duplicate tag handle #{handle}",token.start_mark) if @tag_handles.member?(handle)
+           @tag_handles[handle] = prefix
+         end
+       end
+       if !@tag_handles.empty?
+         value = @yaml_version, @tag_handles.dup
+       else
+         value = @yaml_version, nil
+       end
+       for key in DEFAULT_TAGS.keys
+         @tag_handles[key] = DEFAULT_TAGS[key] if !@tag_handles.include?(key)
+       end
+       value
+     end
+
+     def parse_block_node
+       parse_node(true)
+     end
+
+     def parse_flow_node
+       parse_node
+     end
+
+     def parse_block_node_or_indentless_sequence
+       parse_node(true, true)
+     end
+
+     def parse_node(block=false, indentless_sequence=false)
+       # block_node ::= ALIAS | properties? block_content
+       # flow_node ::= ALIAS | properties? flow_content
+       # properties ::= TAG ANCHOR? | ANCHOR TAG?
+       # block_content ::= block_collection | flow_collection | SCALAR
+       # flow_content ::= flow_collection | SCALAR
+       # block_collection ::= block_sequence | block_mapping
+       # block_node_or_indentless_sequence ::= ALIAS | properties?
+       #                                       (block_content | indentless_block_sequence)
+       events = []
+       if check_token(AliasToken)
+         token = get_token
+         events << AliasEvent.new(token.value, token.start_mark, token.end_mark)
+       else
+         anchor = nil
+         tag = nil
+         start_mark = end_mark = tag_mark = nil
+         if check_token(AnchorToken)
+           token = get_token
+           start_mark = token.start_mark
+           end_mark = token.end_mark
+           anchor = token.value
+           if check_token(TagToken)
+             token = get_token
+             tag_mark = token.start_mark
+             end_mark = token.end_mark
+             tag = token.value
+           end
+         elsif check_token(TagToken)
+           token = get_token
+           start_mark = tag_mark = token.start_mark
+           end_mark = token.end_mark
+           tag = token.value
+           if check_token(AnchorToken)
+             token = get_token
+             end_mark = token.end_mark
+             anchor = token.value
+           end
+         end
+
+         if !tag.nil? and tag != "!"
+           handle, suffix = tag
+           if !handle.nil?
+             raise ParserError.new("while parsing a node", start_mark,"found undefined tag handle #{handle}",tag_mark) if !@tag_handles.include?(handle)
+             tag = @tag_handles[handle]+suffix
+           else
+             tag = suffix
+           end
+         end
+
+         #if tag == u'!':
+         #  raise ParserError("while parsing a node", start_mark,
+         #    "found non-specific tag '!'", tag_mark,
+         #    "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
+         if start_mark.nil?
+           start_mark = end_mark = peek_token.start_mark
+         end
+         event = nil
+         collection_events = nil
+         if indentless_sequence && check_token(BlockEntryToken)
+           end_mark = peek_token.end_mark
+           event = SequenceStartEvent.new(anchor, tag, start_mark, end_mark)
+           collection_events = parse_indentless_sequence
+         else
+           if check_token(ScalarToken)
+             token = get_token
+             end_mark = token.end_mark
+             implicit = ((tag.nil? || tag == "!") && token.implicit)
+             event = ScalarEvent.new(anchor, tag, implicit, token.value,start_mark, end_mark,token.style)
+           elsif check_token(FlowSequenceStartToken)
+             end_mark = peek_token.end_mark
+             event = SequenceStartEvent.new(anchor, tag, start_mark, end_mark,true)
+             collection_events = parse_flow_sequence
+           elsif check_token(FlowMappingStartToken)
+             end_mark = peek_token.end_mark
+             event = MappingStartEvent.new(anchor, tag, start_mark, end_mark,true)
+             collection_events = parse_flow_mapping
+           elsif block && check_token(BlockSequenceStartToken)
+             end_mark = peek_token.start_mark
+             event = SequenceStartEvent.new(anchor, tag, start_mark, end_mark,false)
+             collection_events = parse_block_sequence
+           elsif block && check_token(BlockMappingStartToken)
+             end_mark = peek_token.start_mark
+             event = MappingStartEvent.new(anchor, tag, start_mark, end_mark,false)
+             collection_events = parse_block_mapping
+           elsif !anchor.nil? || !tag.nil?
+             # Empty scalars are allowed even if a tag or an anchor is
+             # specified.
+             implicit = (tag.nil? || tag == "!")
+             event = ScalarEvent.new(anchor, tag, implicit,"",start_mark, end_mark)
+           else
+             if block
+               node = "block"
+             else
+               node = "flow"
+             end
+             token = peek_token
+             raise ParserError.new("while scanning a #{node} node", start_mark,"expected the node content, but found #{token.tid}",token.start_mark)
+           end
+         end
+         events << event
+         events += collection_events if collection_events
+       end
+       events
+     end
+
+     def parse_block_sequence
+       # BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+       events = []
+       token = get_token
+       start_mark = token.start_mark
+       while check_token(BlockEntryToken)
+         token = get_token
+         if !check_token(BlockEntryToken, BlockEndToken)
+           events += parse_block_node
+         else
+           events << process_empty_scalar(token.end_mark)
+         end
+       end
+       if !check_token(BlockEndToken)
+         token = peek_token
+         raise ParserError.new("while scanning a block collection", start_mark,"expected <block end>, but found #{token.tid}", token.start_mark)
+       end
+       token = get_token
+       events << SequenceEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
+     def parse_indentless_sequence
+       # (BLOCK-ENTRY block_node?)+
+       events = []
+       while check_token(BlockEntryToken)
+         token = get_token
+         if !check_token(BlockEntryToken,KeyToken, ValueToken, BlockEndToken)
+           events += parse_block_node
+         else
+           events << process_empty_scalar(token.end_mark)
+         end
+       end
+       token = peek_token
+       events << SequenceEndEvent.new(token.start_mark, token.start_mark)
+       events
+     end
+
+
+     def parse_block_mapping
+       # BLOCK-MAPPING-START
+       #   ((KEY block_node_or_indentless_sequence?)?
+       #    (VALUE block_node_or_indentless_sequence?)?)*
+       # BLOCK-END
+       events = []
+       token = get_token
+       start_mark = token.start_mark
+       while check_token(KeyToken, ValueToken)
+         if check_token(KeyToken)
+           token = get_token
+           if !check_token(KeyToken, ValueToken, BlockEndToken)
+             events += parse_block_node_or_indentless_sequence
+           else
+             events << process_empty_scalar(token.end_mark)
+           end
+         end
+         if check_token(ValueToken)
+           token = get_token
+           if !check_token(KeyToken, ValueToken, BlockEndToken)
+             events += parse_block_node_or_indentless_sequence
+           else
+             events << process_empty_scalar(token.end_mark)
+           end
+         else
+           token = peek_token
+           events << process_empty_scalar(token.start_mark)
+         end
+       end
+       if !check_token(BlockEndToken)
+         token = peek_token
+         raise ParserError.new("while scanning a block mapping", start_mark,"expected <block end>, but found #{token.tid}", token.start_mark)
+       end
+       token = get_token
+       events << MappingEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
+     def parse_flow_sequence
+       # flow_sequence ::= FLOW-SEQUENCE-START
+       #                   (flow_sequence_entry FLOW-ENTRY)*
+       #                   flow_sequence_entry?
+       #                   FLOW-SEQUENCE-END
+       # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+       #
+       # Note that while production rules for both flow_sequence_entry and
+       # flow_mapping_entry are equal, their interpretations are different.
+       # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+       # generates an inline mapping (set syntax).
+       events = []
+       token = get_token
+       start_mark = token.start_mark
+       while !check_token(FlowSequenceEndToken)
+         if check_token(KeyToken)
+           token = get_token
+           events << MappingStartEvent.new(nil,nil,token.start_mark, token.end_mark,true)
+           if !check_token(ValueToken,FlowEntryToken, FlowSequenceEndToken)
+             events += parse_flow_node
+           else
+             events << process_empty_scalar(token.end_mark)
+           end
+           if check_token(ValueToken)
+             token = get_token
+             if !check_token(FlowEntryToken, FlowSequenceEndToken)
+               events += parse_flow_node
+             else
+               events << process_empty_scalar(token.end_mark)
+             end
+           else
+             token = peek_token
+             events << process_empty_scalar(token.start_mark)
+           end
+           token = peek_token
+           events << MappingEndEvent.new(token.start_mark, token.start_mark)
+         else
+           events += parse_flow_node
+         end
+         if !check_token(FlowEntryToken, FlowSequenceEndToken)
+           token = peek_token
+           raise ParserError.new("while scanning a flow sequence", start_mark,"expected ',' or ']', but got #{token.tid}", token.start_mark)
+         end
+         if check_token(FlowEntryToken)
+           get_token
+         end
+       end
+       token = get_token
+       events << SequenceEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
+     def parse_flow_mapping
+       # flow_mapping ::= FLOW-MAPPING-START
+       #                  (flow_mapping_entry FLOW-ENTRY)*
+       #                  flow_mapping_entry?
+       #                  FLOW-MAPPING-END
+       # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+       events = []
+       token = get_token
+       start_mark = token.start_mark
+       while !check_token(FlowMappingEndToken)
+         if check_token(KeyToken)
+           token = get_token
+           if !check_token(ValueToken,FlowEntryToken, FlowMappingEndToken)
+             events += parse_flow_node
+           else
+             events << process_empty_scalar(token.end_mark)
+           end
+           if check_token(ValueToken)
+             token = get_token
+             if !check_token(FlowEntryToken, FlowMappingEndToken)
+               events += parse_flow_node
+             else
+               events << process_empty_scalar(token.end_mark)
+             end
+           else
+             token = peek_token
+             events << process_empty_scalar(token.start_mark)
+           end
+         else
+           events += parse_flow_node
+           events << process_empty_scalar(peek_token.start_mark)
+         end
+         if !check_token(FlowEntryToken, FlowMappingEndToken)
+           token = peek_token
+           raise ParserError.new("while scanning a flow mapping", start_mark,"expected ',' or '}', but got #{token.tid}", token.start_mark)
+         end
+         get_token if check_token(FlowEntryToken)
+       end
+       if !check_token(FlowMappingEndToken)
+         token = peek_token
+         raise ParserError.new("while scanning a flow mapping", start_mark,"expected '}', but found #{token.tid}", token.start_mark)
+       end
+       token = get_token
+       events << MappingEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
+     def process_empty_scalar(mark)
+       ScalarEvent.new(nil, nil, nil, "", mark, mark)
+     end
+   end
+ end
+
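The comment block at the top of this file spells out the LL(1) grammar; check_token and peek_token supply the single token of lookahead, and each parse_* method returns the flat list of events for its production. A small sketch of pulling the event stream through a Loader, again assuming a plain String is acceptable as the stream:

    require 'rbyaml/loader'

    loader = RbYAML::Loader.new("a: 1\nb: [2, 3]\n")
    puts loader.get_event while loader.check_event   # one event per line, ending with StreamEndEvent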