RbYAML 0.0.2 → 0.1.0

Files changed (44)
  1. data/README +1 -1
  2. data/lib/rbyaml/composer.rb +28 -25
  3. data/lib/rbyaml/composer.rb.~1.2.~ +109 -0
  4. data/lib/rbyaml/constructor.rb +94 -84
  5. data/lib/rbyaml/constructor.rb.~1.2.~ +381 -0
  6. data/lib/rbyaml/dumper.rb +10 -17
  7. data/lib/rbyaml/dumper.rb.~1.2.~ +43 -0
  8. data/lib/rbyaml/emitter.rb +13 -26
  9. data/lib/rbyaml/emitter.rb.~1.2.~ +1116 -0
  10. data/lib/rbyaml/error.rb +15 -21
  11. data/lib/rbyaml/events.rb +29 -5
  12. data/lib/rbyaml/events.rb.~1.2.~ +93 -0
  13. data/lib/rbyaml/loader.rb +11 -23
  14. data/lib/rbyaml/loader.rb.~1.2.~ +52 -0
  15. data/lib/rbyaml/nodes.rb +13 -9
  16. data/lib/rbyaml/nodes.rb.~1.2.~ +52 -0
  17. data/lib/rbyaml/parser.rb +481 -343
  18. data/lib/rbyaml/parser.rb.old +531 -0
  19. data/lib/rbyaml/parser.rb.~1.2.~ +494 -0
  20. data/lib/rbyaml/reader.rb.~1.1.1.1.~ +127 -0
  21. data/lib/rbyaml/representer.rb +26 -17
  22. data/lib/rbyaml/representer.rb.~1.2.~ +239 -0
  23. data/lib/rbyaml/resolver.rb +15 -15
  24. data/lib/rbyaml/resolver.rb.~1.1.~ +163 -0
  25. data/lib/rbyaml/scanner.rb +457 -366
  26. data/lib/rbyaml/scanner.rb.~1.2.~ +1259 -0
  27. data/lib/rbyaml/serializer.rb +19 -17
  28. data/lib/rbyaml/serializer.rb.~1.2.~ +115 -0
  29. data/lib/rbyaml/tokens.rb +44 -4
  30. data/lib/rbyaml/tokens.rb.~1.2.~ +164 -0
  31. data/lib/rbyaml/util.rb +28 -0
  32. data/lib/rbyaml/yaml.rb +12 -12
  33. data/lib/rbyaml/yaml.rb.~1.2.~ +136 -0
  34. data/test/test_bm.rb +28 -0
  35. data/test/test_bm_syck.rb +28 -0
  36. data/test/test_invoke.rb +31 -0
  37. data/test/test_one.rb +5 -0
  38. data/test/test_profile.rb +32 -0
  39. data/test/test_rbyaml.rb +2 -1
  40. data/test/test_rbyaml.rb.~1.2.~ +31 -0
  41. data/test/test_time.rb +13 -8
  42. data/test/test_time.rb.~1.1.~ +29 -0
  43. data/test/yamlx.rb +3563 -0
  44. metadata +27 -2
data/lib/rbyaml/parser.rb.old
@@ -0,0 +1,531 @@
+
+ # YAML can be parsed by an LL(1) parser!
+ #
+ # We use the following production rules:
+ # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+ # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END?
+ # implicit_document ::= block_node DOCUMENT-END?
+ # block_node ::= ALIAS | properties? block_content
+ # flow_node ::= ALIAS | properties? flow_content
+ # properties ::= TAG ANCHOR? | ANCHOR TAG?
+ # block_content ::= block_collection | flow_collection | SCALAR
+ # flow_content ::= flow_collection | SCALAR
+ # block_collection ::= block_sequence | block_mapping
+ # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+ # block_mapping ::= BLOCK-MAPPING-START ((KEY block_node_or_indentless_sequence?)? (VALUE block_node_or_indentless_sequence?)?)* BLOCK-END
+ # block_node_or_indentless_sequence ::= ALIAS | properties? (block_content | indentless_block_sequence)
+ # indentless_block_sequence ::= (BLOCK-ENTRY block_node?)+
+ # flow_collection ::= flow_sequence | flow_mapping
+ # flow_sequence ::= FLOW-SEQUENCE-START (flow_sequence_entry FLOW-ENTRY)* flow_sequence_entry? FLOW-SEQUENCE-END
+ # flow_mapping ::= FLOW-MAPPING-START (flow_mapping_entry FLOW-ENTRY)* flow_mapping_entry? FLOW-MAPPING-END
+ # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+ # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+
+ # TODO: support for BOM within a stream.
+ # stream ::= (BOM? implicit_document)? (BOM? explicit_document)* STREAM-END
+
+ # FIRST sets:
+ # stream: { STREAM-START }
+ # explicit_document: { DIRECTIVE DOCUMENT-START }
+ # implicit_document: FIRST(block_node)
+ # block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
+ # flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
+ # block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+ # flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+ # block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
+ # flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+ # block_sequence: { BLOCK-SEQUENCE-START }
+ # block_mapping: { BLOCK-MAPPING-START }
+ # block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
+ # indentless_sequence: { ENTRY }
+ # flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+ # flow_sequence: { FLOW-SEQUENCE-START }
+ # flow_mapping: { FLOW-MAPPING-START }
+ # flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+ # flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+
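As a concrete illustration of the productions and FIRST sets above, a small block mapping run through this parser should produce one event per grammar symbol. This is a sketch, not output from the gem: it assumes the Scanner in this package can be built directly from a string, and only Parser#each_event is taken from this file.

    require 'rbyaml/scanner'
    require 'rbyaml/parser'

    # Assumption: Scanner.new accepts the raw input string in this version.
    scanner = RbYAML::Scanner.new("a: 1\n")
    parser  = RbYAML::Parser.new(scanner)
    parser.each_event { |event| puts event.class }
    # Expected order, per the grammar above:
    #   StreamStartEvent, DocumentStartEvent, MappingStartEvent,
    #   ScalarEvent ("a"), ScalarEvent ("1"),
    #   MappingEndEvent, DocumentEndEvent, StreamEndEvent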
+ require 'rbyaml/error'
+ require 'rbyaml/tokens'
+ require 'rbyaml/events'
+ require 'rbyaml/scanner'
+
+ module RbYAML
+   class ParserError < MarkedYAMLError
+   end
+
+   class Parser
+     DEFAULT_TAGS = {
+       '!' => '!',
+       '!!' => 'tag:yaml.org,2002:'
+     }
+
+     def initialize(scanner)
+       @scanner = scanner
+       @current_event = nil
+       @yaml_version = nil
+       @events = nil
+       @working_events = nil
+       @tag_handles = { }
+     end
+
+     # Check the type of the next event.
+     def check_event(*choices)
+       init_events
+       @current_event = @working_events.shift if @current_event.nil?
+       if @current_event
+         return true if choices.empty?
+         for choice in choices
+           return true if choice === @current_event
+         end
+       end
+       false
+     end
+
+     # Peek at the next event without consuming it.
+     def peek_event
+       init_events
+       @current_event = @working_events.shift if @current_event.nil?
+       @current_event
+     end
+
+     # Get the next event and advance past it.
+     def get_event
+       init_events
+       @current_event = @working_events.shift if @current_event.nil?
+       value = @current_event
+       @current_event = nil
+       value
+     end
+
+     # Parse the whole stream once and cache the resulting events.
+     def init_events
+       @events ||= parse_stream
+       @working_events ||= @events
+     end
+
+     # Iterate over every event in the stream.
+     def each_event(&block)
+       init_events
+       @events.each(&block)
+     end
+
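check_event, peek_event and get_event form the pull interface that a consumer such as the composer drives. A minimal sketch of such a loop (count_documents is a hypothetical helper written for illustration; the event classes come from rbyaml/events):

    require 'rbyaml/events'

    # Count the documents in a stream without composing them.
    def count_documents(parser)
      count = 0
      while parser.check_event                 # any events left?
        event = parser.get_event
        count += 1 if event.is_a?(RbYAML::DocumentStartEvent)
      end
      count
    end

    # e.g. count_documents(RbYAML::Parser.new(scanner))  # => 1 for "a: 1\n"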
+     def parse_stream
+       # STREAM-START implicit_document? explicit_document* STREAM-END
+
+       # Parse start of stream.
+       events = []
+       token = @scanner.get_token
+       events << StreamStartEvent.new(token.start_mark, token.end_mark,token.encoding)
+
+       # Parse implicit document.
+       unless @scanner.check_token(DirectiveToken, DocumentStartToken,StreamEndToken)
+         @tag_handles = DEFAULT_TAGS
+         token = @scanner.peek_token
+         start_mark = end_mark = token.start_mark
+         events << DocumentStartEvent.new(start_mark, end_mark,false)
+         events += parse_block_node
+         token = @scanner.peek_token
+         start_mark = end_mark = token.start_mark
+         explicit = false
+         while @scanner.check_token(DocumentEndToken)
+           token = @scanner.get_token
+           end_mark = token.end_mark
+           explicit = true
+         end
+         events << DocumentEndEvent.new(start_mark, end_mark,explicit)
+       end
+
+       # Parse explicit documents.
+       while !@scanner.check_token(StreamEndToken)
+         token = @scanner.peek_token
+         start_mark = token.start_mark
+         version, tags = process_directives
+         raise ParserError.new(nil, nil,"expected '<document start>', but found #{@scanner.peek_token.tid}",@scanner.peek_token.start_mark) unless @scanner.check_token(DocumentStartToken)
+         token = @scanner.get_token
+         end_mark = token.end_mark
+         events << DocumentStartEvent.new(start_mark, end_mark,true,version,tags)
+         if @scanner.check_token(DirectiveToken,DocumentStartToken, DocumentEndToken, StreamEndToken)
+           events << process_empty_scalar(token.end_mark)
+         else
+           events += parse_block_node
+         end
+         token = @scanner.peek_token
+         start_mark = end_mark = token.start_mark
+         explicit = false
+         while @scanner.check_token(DocumentEndToken)
+           token = @scanner.get_token
+           end_mark = token.end_mark
+           explicit = true
+         end
+         events << DocumentEndEvent.new(start_mark, end_mark,explicit)
+       end
+       # Parse end of stream.
+       token = @scanner.get_token
+       events << StreamEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
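parse_stream wraps each document in its own DocumentStartEvent/DocumentEndEvent pair, whether the document is implicit or introduced by '---'. For a two-document stream the expected shape is (the input string and the expectation list are illustrative, not captured output):

    require 'rbyaml/events'

    input    = "--- 1\n--- 2\n"
    expected = [
      RbYAML::StreamStartEvent,
      RbYAML::DocumentStartEvent, RbYAML::ScalarEvent, RbYAML::DocumentEndEvent,  # "1"
      RbYAML::DocumentStartEvent, RbYAML::ScalarEvent, RbYAML::DocumentEndEvent,  # "2"
      RbYAML::StreamEndEvent
    ]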
+     def process_directives
+       # DIRECTIVE*
+       while @scanner.check_token(DirectiveToken)
+         token = @scanner.get_token
+         if token.name == "YAML"
+           raise ParserError.new(nil, nil,"found duplicate YAML directive", token.start_mark) if !@yaml_version.nil?
+           major, minor = token.value[0].to_i, token.value[1].to_i
+           raise ParserError.new(nil,nil,"found incompatible YAML document (version 1.* is required)",token.start_mark) if major != 1
+           @yaml_version = [major,minor]
+         elsif token.name == "TAG"
+           handle, prefix = token.value
+           raise ParserError.new(nil,nil,"duplicate tag handle #{handle}",token.start_mark) if @tag_handles.member?(handle)
+           @tag_handles[handle] = prefix
+         end
+       end
+       if !@tag_handles.empty?
+         value = @yaml_version, @tag_handles.dup
+       else
+         value = @yaml_version, nil
+       end
+       for key in DEFAULT_TAGS.keys
+         @tag_handles[key] = DEFAULT_TAGS[key] if !@tag_handles.include?(key)
+       end
+       value
+     end
+
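The handles collected here are consumed by the tag resolution in parse_node below, which is a plain concatenation of the handle's prefix and the scanned suffix. A small worked example (the '!e!' entry is hypothetical, as if a %TAG directive had declared it):

    tag_handles = {
      '!'   => '!',
      '!!'  => 'tag:yaml.org,2002:',      # DEFAULT_TAGS, merged in above
      '!e!' => 'tag:example.com,2002:'    # hypothetical "%TAG !e! tag:example.com,2002:"
    }

    handle, suffix = '!!', 'str'          # scanned from the tag "!!str"
    puts tag_handles[handle] + suffix     # => tag:yaml.org,2002:str

    handle, suffix = '!e!', 'point'       # scanned from "!e!point"
    puts tag_handles[handle] + suffix     # => tag:example.com,2002:point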
+     def parse_block_node
+       parse_node(true)
+     end
+
+     def parse_flow_node
+       parse_node
+     end
+
+     def parse_block_node_or_indentless_sequence
+       parse_node(true, true)
+     end
+
+     def parse_node(block=false, indentless_sequence=false)
+       # block_node ::= ALIAS | properties? block_content
+       # flow_node ::= ALIAS | properties? flow_content
+       # properties ::= TAG ANCHOR? | ANCHOR TAG?
+       # block_content ::= block_collection | flow_collection | SCALAR
+       # flow_content ::= flow_collection | SCALAR
+       # block_collection ::= block_sequence | block_mapping
+       # block_node_or_indentless_sequence ::= ALIAS | properties?
+       #                                       (block_content | indentless_block_sequence)
+       events = []
+       if @scanner.check_token(AliasToken)
+         token = @scanner.get_token
+         events << AliasEvent.new(token.value, token.start_mark, token.end_mark)
+       else
+         anchor = nil
+         tag = nil
+         start_mark = end_mark = tag_mark = nil
+         if @scanner.check_token(AnchorToken)
+           token = @scanner.get_token
+           start_mark = token.start_mark
+           end_mark = token.end_mark
+           anchor = token.value
+           if @scanner.check_token(TagToken)
+             token = @scanner.get_token
+             tag_mark = token.start_mark
+             end_mark = token.end_mark
+             tag = token.value
+           end
+         elsif @scanner.check_token(TagToken)
+           token = @scanner.get_token
+           start_mark = tag_mark = token.start_mark
+           end_mark = token.end_mark
+           tag = token.value
+           if @scanner.check_token(AnchorToken)
+             token = @scanner.get_token
+             end_mark = token.end_mark
+             anchor = token.value
+           end
+         end
+
+         if !tag.nil? and tag != "!"
+           handle, suffix = tag
+           if !handle.nil?
+             raise ParserError.new("while parsing a node", start_mark,"found undefined tag handle #{handle}",tag_mark) if !@tag_handles.include?(handle)
+             tag = @tag_handles[handle]+suffix
+           else
+             tag = suffix
+           end
+         end
+
+         #if tag == u'!':
+         #  raise ParserError("while parsing a node", start_mark,
+         #    "found non-specific tag '!'", tag_mark,
+         #    "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
+         if start_mark.nil?
+           start_mark = end_mark = @scanner.peek_token.start_mark
+         end
+         event = nil
+         collection_events = nil
+         implicit = tag.nil? || tag == "!"
+         if indentless_sequence && @scanner.check_token(BlockEntryToken)
+           end_mark = @scanner.peek_token.end_mark
+           event = SequenceStartEvent.new(anchor, tag, implicit, start_mark, end_mark)
+           collection_events = parse_indentless_sequence
+         else
+           if @scanner.check_token(ScalarToken)
+             token = @scanner.get_token
+             end_mark = token.end_mark
+             if (token.plain && tag.nil?) || tag == "!"
+               implicit = [true, false]
+             elsif tag.nil?
+               implicit = [false, true]
+             else
+               implicit = [false, false]
+             end
+             event = ScalarEvent.new(anchor, tag, implicit, token.value,start_mark, end_mark,token.style)
+           elsif @scanner.check_token(FlowSequenceStartToken)
+             end_mark = @scanner.peek_token.end_mark
+             event = SequenceStartEvent.new(anchor, tag, implicit, start_mark, end_mark,true)
+             collection_events = parse_flow_sequence
+           elsif @scanner.check_token(FlowMappingStartToken)
+             end_mark = @scanner.peek_token.end_mark
+             event = MappingStartEvent.new(anchor, tag, implicit, start_mark, end_mark,true)
+             collection_events = parse_flow_mapping
+           elsif block && @scanner.check_token(BlockSequenceStartToken)
+             end_mark = @scanner.peek_token.start_mark
+             event = SequenceStartEvent.new(anchor, tag, implicit, start_mark, end_mark,false)
+             collection_events = parse_block_sequence
+           elsif block && @scanner.check_token(BlockMappingStartToken)
+             end_mark = @scanner.peek_token.start_mark
+             event = MappingStartEvent.new(anchor, tag, implicit, start_mark, end_mark,false)
+             collection_events = parse_block_mapping
+           elsif !anchor.nil? || !tag.nil?
+             # Empty scalars are allowed even if a tag or an anchor is
+             # specified.
+             event = ScalarEvent.new(anchor, tag, [implicit,false],"",start_mark, end_mark)
+           else
+             if block
+               node = "block"
+             else
+               node = "flow"
+             end
+             token = @scanner.peek_token
+             raise ParserError.new("while scanning a #{node} node", start_mark,"expected the node content, but found #{token.tid}",token.start_mark)
+           end
+         end
+         events << event
+         events += collection_events if collection_events
+       end
+       events
+     end
+
+     def parse_block_sequence
+       # BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+       events = []
+       token = @scanner.get_token
+       start_mark = token.start_mark
+       while @scanner.check_token(BlockEntryToken)
+         token = @scanner.get_token
+         if !@scanner.check_token(BlockEntryToken, BlockEndToken)
+           events += parse_block_node
+         else
+           events << process_empty_scalar(token.end_mark)
+         end
+       end
+       if !@scanner.check_token(BlockEndToken)
+         token = @scanner.peek_token
+         raise ParserError.new("while scanning a block collection", start_mark,"expected <block end>, but found #{token.tid}", token.start_mark)
+       end
+       token = @scanner.get_token
+       events << SequenceEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
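An entry marked with '-' but carrying no node falls into the process_empty_scalar branch above, so every entry still yields exactly one node-level event. Inside the enclosing stream and document events, the sequence-level events expected for such an input are (input and expectation are illustrative, not captured output):

    require 'rbyaml/events'

    input    = "- one\n-\n- two\n"        # the middle entry is empty
    expected = [
      RbYAML::SequenceStartEvent,
      RbYAML::ScalarEvent,                # "one"
      RbYAML::ScalarEvent,                # "" from process_empty_scalar
      RbYAML::ScalarEvent,                # "two"
      RbYAML::SequenceEndEvent
    ]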
+     def parse_indentless_sequence
+       # (BLOCK-ENTRY block_node?)+
+       events = []
+       while @scanner.check_token(BlockEntryToken)
+         token = @scanner.get_token
+         if !@scanner.check_token(BlockEntryToken,KeyToken, ValueToken, BlockEndToken)
+           events += parse_block_node
+         else
+           events << process_empty_scalar(token.end_mark)
+         end
+       end
+       token = @scanner.peek_token
+       events << SequenceEndEvent.new(token.start_mark, token.start_mark)
+       events
+     end
+
+
+     def parse_block_mapping
+       # BLOCK-MAPPING-START
+       #   ((KEY block_node_or_indentless_sequence?)?
+       #    (VALUE block_node_or_indentless_sequence?)?)*
+       # BLOCK-END
+       events = []
+       token = @scanner.get_token
+       start_mark = token.start_mark
+       while @scanner.check_token(KeyToken, ValueToken)
+         if @scanner.check_token(KeyToken)
+           token = @scanner.get_token
+           if !@scanner.check_token(KeyToken, ValueToken, BlockEndToken)
+             events += parse_block_node_or_indentless_sequence
+           else
+             events << process_empty_scalar(token.end_mark)
+           end
+         end
+         if @scanner.check_token(ValueToken)
+           token = @scanner.get_token
+           if !@scanner.check_token(KeyToken, ValueToken, BlockEndToken)
+             events += parse_block_node_or_indentless_sequence
+           else
+             events << process_empty_scalar(token.end_mark)
+           end
+         else
+           token = @scanner.peek_token
+           events << process_empty_scalar(token.start_mark)
+         end
+       end
+       if !@scanner.check_token(BlockEndToken)
+         token = @scanner.peek_token
+         raise ParserError.new("while scanning a block mapping", start_mark,"expected <block end>, but found #{token.tid}", token.start_mark)
+       end
+       token = @scanner.get_token
+       events << MappingEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
+     def parse_flow_sequence
+       # flow_sequence ::= FLOW-SEQUENCE-START
+       #                   (flow_sequence_entry FLOW-ENTRY)*
+       #                   flow_sequence_entry?
+       #                   FLOW-SEQUENCE-END
+       # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+       #
+       # Note that while production rules for both flow_sequence_entry and
+       # flow_mapping_entry are equal, their interpretations are different.
+       # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
+       # generates an inline mapping (set syntax).
+       events = []
+       token = @scanner.get_token
+       start_mark = token.start_mark
+       while !@scanner.check_token(FlowSequenceEndToken)
+         if @scanner.check_token(KeyToken)
+           token = @scanner.get_token
+           events << MappingStartEvent.new(nil,nil,true,token.start_mark, token.end_mark,true)
+           if !@scanner.check_token(ValueToken,FlowEntryToken, FlowSequenceEndToken)
+             events += parse_flow_node
+           else
+             events << process_empty_scalar(token.end_mark)
+           end
+           if @scanner.check_token(ValueToken)
+             token = @scanner.get_token
+             if !@scanner.check_token(FlowEntryToken, FlowSequenceEndToken)
+               events += parse_flow_node
+             else
+               events << process_empty_scalar(token.end_mark)
+             end
+           else
+             token = @scanner.peek_token
+             events << process_empty_scalar(token.start_mark)
+           end
+           token = @scanner.peek_token
+           events << MappingEndEvent.new(token.start_mark, token.start_mark)
+         else
+           events += parse_flow_node
+         end
+         if !@scanner.check_token(FlowEntryToken, FlowSequenceEndToken)
+           token = @scanner.peek_token
+           raise ParserError.new("while scanning a flow sequence", start_mark,"expected ',' or ']', but got #{token.tid}", token.start_mark)
+         end
+         if @scanner.check_token(FlowEntryToken)
+           @scanner.get_token
+         end
+       end
+       token = @scanner.get_token
+       events << SequenceEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
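The "set syntax" note above means a KEY entry inside a flow sequence is wrapped in its own single-pair mapping. For a sequence such as [left, top: bottom], the sequence-level events are expected to be (illustrative, not captured output):

    require 'rbyaml/events'

    input    = "[left, top: bottom]"
    expected = [
      RbYAML::SequenceStartEvent,
      RbYAML::ScalarEvent,                # "left"
      RbYAML::MappingStartEvent,          # inline single-pair mapping
      RbYAML::ScalarEvent,                # "top"
      RbYAML::ScalarEvent,                # "bottom"
      RbYAML::MappingEndEvent,
      RbYAML::SequenceEndEvent
    ]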
+     def parse_flow_mapping
+       # flow_mapping ::= FLOW-MAPPING-START
+       #                  (flow_mapping_entry FLOW-ENTRY)*
+       #                  flow_mapping_entry?
+       #                  FLOW-MAPPING-END
+       # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+       events = []
+       token = @scanner.get_token
+       start_mark = token.start_mark
+       while !@scanner.check_token(FlowMappingEndToken)
+         if @scanner.check_token(KeyToken)
+           token = @scanner.get_token
+           if !@scanner.check_token(ValueToken,FlowEntryToken, FlowMappingEndToken)
+             events += parse_flow_node
+           else
+             events << process_empty_scalar(token.end_mark)
+           end
+           if @scanner.check_token(ValueToken)
+             token = @scanner.get_token
+             if !@scanner.check_token(FlowEntryToken, FlowMappingEndToken)
+               events += parse_flow_node
+             else
+               events << process_empty_scalar(token.end_mark)
+             end
+           else
+             token = @scanner.peek_token
+             events << process_empty_scalar(token.start_mark)
+           end
+         else
+           events += parse_flow_node
+           events << process_empty_scalar(@scanner.peek_token.start_mark)
+         end
+         if !@scanner.check_token(FlowEntryToken, FlowMappingEndToken)
+           token = @scanner.peek_token
+           raise ParserError.new("while scanning a flow mapping", start_mark,"expected ',' or '}', but got #{token.tid}", token.start_mark)
+         end
+         @scanner.get_token if @scanner.check_token(FlowEntryToken)
+       end
+       if !@scanner.check_token(FlowMappingEndToken)
+         token = @scanner.peek_token
+         raise ParserError.new("while scanning a flow mapping", start_mark,"expected '}', but found #{token.tid}", token.start_mark)
+       end
+       token = @scanner.get_token
+       events << MappingEndEvent.new(token.start_mark, token.end_mark)
+       events
+     end
+
+     def process_empty_scalar(mark)
+       ScalarEvent.new(nil, nil, [true, false], "", mark, mark)
+     end
+   end
+ end
+