RbYAML 0.0.2 → 0.1.0

Files changed (44)
  1. data/README +1 -1
  2. data/lib/rbyaml/composer.rb +28 -25
  3. data/lib/rbyaml/composer.rb.~1.2.~ +109 -0
  4. data/lib/rbyaml/constructor.rb +94 -84
  5. data/lib/rbyaml/constructor.rb.~1.2.~ +381 -0
  6. data/lib/rbyaml/dumper.rb +10 -17
  7. data/lib/rbyaml/dumper.rb.~1.2.~ +43 -0
  8. data/lib/rbyaml/emitter.rb +13 -26
  9. data/lib/rbyaml/emitter.rb.~1.2.~ +1116 -0
  10. data/lib/rbyaml/error.rb +15 -21
  11. data/lib/rbyaml/events.rb +29 -5
  12. data/lib/rbyaml/events.rb.~1.2.~ +93 -0
  13. data/lib/rbyaml/loader.rb +11 -23
  14. data/lib/rbyaml/loader.rb.~1.2.~ +52 -0
  15. data/lib/rbyaml/nodes.rb +13 -9
  16. data/lib/rbyaml/nodes.rb.~1.2.~ +52 -0
  17. data/lib/rbyaml/parser.rb +481 -343
  18. data/lib/rbyaml/parser.rb.old +531 -0
  19. data/lib/rbyaml/parser.rb.~1.2.~ +494 -0
  20. data/lib/rbyaml/reader.rb.~1.1.1.1.~ +127 -0
  21. data/lib/rbyaml/representer.rb +26 -17
  22. data/lib/rbyaml/representer.rb.~1.2.~ +239 -0
  23. data/lib/rbyaml/resolver.rb +15 -15
  24. data/lib/rbyaml/resolver.rb.~1.1.~ +163 -0
  25. data/lib/rbyaml/scanner.rb +457 -366
  26. data/lib/rbyaml/scanner.rb.~1.2.~ +1259 -0
  27. data/lib/rbyaml/serializer.rb +19 -17
  28. data/lib/rbyaml/serializer.rb.~1.2.~ +115 -0
  29. data/lib/rbyaml/tokens.rb +44 -4
  30. data/lib/rbyaml/tokens.rb.~1.2.~ +164 -0
  31. data/lib/rbyaml/util.rb +28 -0
  32. data/lib/rbyaml/yaml.rb +12 -12
  33. data/lib/rbyaml/yaml.rb.~1.2.~ +136 -0
  34. data/test/test_bm.rb +28 -0
  35. data/test/test_bm_syck.rb +28 -0
  36. data/test/test_invoke.rb +31 -0
  37. data/test/test_one.rb +5 -0
  38. data/test/test_profile.rb +32 -0
  39. data/test/test_rbyaml.rb +2 -1
  40. data/test/test_rbyaml.rb.~1.2.~ +31 -0
  41. data/test/test_time.rb +13 -8
  42. data/test/test_time.rb.~1.1.~ +29 -0
  43. data/test/yamlx.rb +3563 -0
  44. metadata +27 -2
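
The bulk of this release is a rewrite of data/lib/rbyaml/parser.rb: the old Parser mix-in module becomes a Parser class that takes a scanner in its constructor and pulls events on demand from an explicit production stack, instead of materializing the whole event list up front. A minimal usage sketch of the new event API follows; Parser.new, check_event, get_event and each_event are taken from the diff below, while make_scanner is a hypothetical stand-in, since scanner construction is not part of this changeset.

  # Sketch only: Parser.new(scanner), check_event, get_event and each_event
  # come from the diff below; make_scanner is a hypothetical helper for
  # whatever builds an RbYAML scanner (not shown in this changeset).
  require 'rbyaml/parser'

  scanner = make_scanner("--- foo\n")    # hypothetical helper
  parser  = RbYAML::Parser.new(scanner)  # the scanner is now injected explicitly

  while parser.check_event               # more events left in the stream?
    event = parser.get_event             # pull one event, advancing the parse stack
    puts event.class
  end

  # or, using the block form:
  # parser.each_event { |event| puts event.class }
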
@@ -53,24 +53,28 @@ module RbYAML
  class ParserError < MarkedYAMLError
  end

- module Parser
-
+ class Parser
  DEFAULT_TAGS = {
  '!' => '!',
  '!!' => 'tag:yaml.org,2002:'
  }

- def initialize_parser
+ def initialize(scanner)
+ @scanner = scanner
  @current_event = nil
  @yaml_version = nil
  @events = nil
  @working_events = nil
  @tag_handles = { }
+ @parse_stack = nil
+ @start_mark = []
+ @tks = []
+
  end

  def check_event(*choices)
- init_events
- @current_event = @working_events.shift if @current_event.nil?
+ parse_stream
+ @current_event = parse_stream_next if @current_event.nil?
  if @current_event
  return true if choices.empty?
  for choice in choices
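
The second hunk below replaces the recursive parse_node / parse_block_* / parse_flow_* methods with an explicit production stack: each grammar method either returns an event or pushes its follow-up symbols onto @parse_stack and returns nil, and parse_stream_next keeps popping symbols and send-ing them until one of them yields an event. The standalone toy below illustrates just that control pattern; it is not RbYAML code, and the class and symbol names are invented for the illustration.

  # Toy pull parser showing the stack-of-productions pattern (illustration only).
  class TinyPullParser
    def initialize
      @stack = [:stream]                  # start symbol
    end

    # Pop symbols until one of them produces an event; nil when exhausted.
    def next_event
      until @stack.empty?
        result = send(@stack.pop)
        return result unless result.nil?
      end
      nil
    end

    private

    # Production: pushes its expansion and also emits the opening event.
    def stream
      @stack.concat([:stream_end, :document])
      :stream_start_event
    end

    # Terminals: in the real parser these consume tokens and build events.
    def document
      :document_event
    end

    def stream_end
      :stream_end_event
    end
  end

  parser = TinyPullParser.new
  while (event = parser.next_event)
    p event   # prints :stream_start_event, :document_event, :stream_end_event
  end

In the real parser the same loop lives in parse_stream_next, with the scanner's peek_token deciding which production or terminal applies.
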
@@ -81,409 +85,543 @@ module RbYAML
  end

  def peek_event
- init_events
- @current_event = @working_events.shift if @current_event.nil?
+ parse_stream
+ @current_event = parse_stream_next unless @current_event
  @current_event
  end

  def get_event
- init_events
- @current_event = @working_events.shift if @current_event.nil?
+ parse_stream
+ @current_event = parse_stream_next unless @current_event
  value = @current_event
  @current_event = nil
  value
  end

- def init_events
- @events ||= parse_stream
- @working_events ||= @events
- end
-
- def each_event(&block)
- init_events
- @events.each(&block)
+ def each_event
+ parse_stream
+ while @current_event = parse_stream_next
+ yield @current_event
+ end
  end

  def parse_stream
- # STREAM-START implicit_document? explicit_document* STREAM-END
-
- # Parse start of stream.
- events = []
- token = get_token
- events << StreamStartEvent.new(token.start_mark, token.end_mark,token.encoding)
-
- # Parse implicit document.
- unless check_token(DirectiveToken, DocumentStartToken,StreamEndToken)
- @tag_handles = DEFAULT_TAGS
- token = peek_token
- start_mark = end_mark = token.start_mark
- events << DocumentStartEvent.new(start_mark, end_mark,false)
- events += parse_block_node
- token = peek_token
- start_mark = end_mark = token.start_mark
- explicit = false
- while check_token(DocumentEndToken)
- token = get_token
- end_mark = token.end_mark
- explicit = true
+ if !@parse_stack
+ @parse_stack = [:stream]
+ @tokens = nil
+ @tags = []
+ @anchors = []
+ @start_marks = []
+ @end_marks = []
+ end
+ end
+
+ def parse_stream_next
+ if !@parse_stack.empty?
+ while true
+ meth = @parse_stack.pop
+ #puts "our method: :#{meth}"
+ #puts "--- with peeked: :#{@scanner.peek_token.class} #{if @scanner.peek_token.respond_to?(:value): @scanner.peek_token.value.inspect; end}"
+ val = send(meth)
+ if !val.nil?
+ #puts "returning: #{val}"
+ return val
+ end
  end
- events << DocumentEndEvent.new(start_mark, end_mark,explicit)
+ else
+ @tokens = nil
+ @tags = []
+ @anchors = []
+ @start_marks = []
+ @end_marks = []
+ return nil
  end
+ end

- # Parse explicit documents.
- while !check_token(StreamEndToken)
- token = peek_token
- start_mark = token.start_mark
- version, tags = process_directives
- raise ParserError.new(nil, nil,"expected '<document start>', but found #{peek_token.id}",peek_token.start_mark) unless check_token(DocumentStartToken)
- token = get_token
+ #TERMINALS, definitions
+
+ def stream_start
+ token = @scanner.get_token
+ StreamStartEvent.new(token.start_mark, token.end_mark,token.encoding)
+ end
+
+ def stream_end
+ token = @scanner.get_token
+ StreamEndEvent.new(token.start_mark, token.end_mark)
+ end
+
+ def document_start_implicit
+ token = @scanner.peek_token
+ version, tags = process_directives
+ DocumentStartEvent.new(token.start_mark,token.start_mark,false)
+ end
+
+ def document_start
+ token = @scanner.peek_token
+ start_mark = token.start_mark
+ version, tags = process_directives
+ raise ParserError.new(nil, nil,"expected '<document start>', but found #{token.tid}",token.start_mark) unless @scanner.peek_token.__is_document_start
+ @token = token = @scanner.get_token
+ end_mark = token.end_mark
+ DocumentStartEvent.new(start_mark, end_mark,true,version,tags)
+ end
+
+ def document_end
+ token = @scanner.peek_token
+ start_mark = end_mark = token.start_mark
+ explicit = false
+ while @scanner.peek_token.__is_document_end
+ @tokens = token = @scanner.get_token
  end_mark = token.end_mark
- events << DocumentStartEvent.new(start_mark, end_mark,true,version,tags)
- if check_token(DirectiveToken,DocumentStartToken, DocumentEndToken, StreamEndToken)
- events << process_empty_scalar(token.end_mark)
- else
- events += parse_block_node
- end
- token = peek_token
- start_mark = end_mark = token.start_mark
- explicit = false
- while check_token(DocumentEndToken)
- token = get_token
- end_mark = token.end_mark
- explicit=true
- end
- events << DocumentEndEvent.new(start_mark, end_mark,explicit)
+ explicit = true
  end
- # Parse end of stream.
- token = get_token
- events << StreamEndEvent.new(token.start_mark, token.end_mark)
- events
+ DocumentEndEvent.new(start_mark, end_mark, explicit)
  end

- def process_directives
- # DIRECTIVE*
- while check_token(DirectiveToken)
- token = get_token
- if token.name == "YAML"
- raise ParserError.new(nil, nil,"found duplicate YAML directive", token.start_mark) if !@yaml_version.nil?
- major, minor = token.value[0].to_i, token.value[1].to_i
- raise ParserError.new(nil,nil,"found incompatible YAML document (version 1.* is required)",token.start_mark) if major != 1
- @yaml_version = [major,minor]
- elsif token.name == "TAG"
- handle, prefix = token.value
- raise ParserError.new(nil,nil,"duplicate tag handle #{handle}",token.start_mark) if @tag_handles.member?(handle)
- @tag_handles[handle] = prefix
- end
+ def _alias
+ token = @scanner.get_token
+ AliasEvent.new(token.value, token.start_mark, token.end_mark)
+ end
+
+ def block_sequence_start
+ end_mark = @scanner.peek_token.start_mark
+ implicit = @tags.last.nil? || @tags.last == ?!
+ @tokens = token = @scanner.get_token
+ SequenceStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,false)
+ end
+
+ def block_indentless_sequence_start
+ end_mark = @scanner.peek_token.end_mark
+ implicit = @tags.last.nil? || @tags.last == ?!
+ SequenceStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,false)
+ end
+
+ def block_sequence_end
+ if !@scanner.peek_token.__is_block_end
+ token = @scanner.peek_token
+ raise ParserError.new("while scanning a block collection", @start_marks.last,"expected <block end>, but found #{token.tid}: #{token.inspect}", token.start_mark)
  end
- if !@tag_handles.empty?
- value = @yaml_version, @tag_handles.dup
- else
- value = @yaml_version, nil
+ token = @scanner.get_token
+ SequenceEndEvent.new(token.start_mark, token.end_mark)
+ end
+
+ def block_indentless_sequence_end
+ @tokens = token = @scanner.peek_token
+ SequenceEndEvent.new(token.start_mark, token.end_mark)
+ end
+
+ def block_mapping_start
+ end_mark = @scanner.peek_token.start_mark
+ implicit = @tags.last.nil? || @tags.last == ?!
+ @tokens = token = @scanner.get_token
+ MappingStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,false)
+ end
+
+ def block_mapping_end
+ if !@scanner.peek_token.__is_block_end
+ token = @scanner.peek_token
+ raise ParserError.new("while scanning a block mapping", @start_marks.last,"expected <block end>, but found #{token.tid}", token.start_mark)
  end
- for key in DEFAULT_TAGS.keys
- @tag_handles[key] = DEFAULT_TAGS[key] if !@tag_handles.include?(key)
+ @tokens = token = @scanner.get_token
+ MappingEndEvent.new(token.start_mark, token.end_mark)
+ end
+
+ def flow_sequence_start
+ end_mark = @scanner.peek_token.end_mark
+ implicit = @tags.last.nil? || @tags.last == ?!
+ @tokens = token = @scanner.get_token
+ SequenceStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,true)
+ end
+
+ def flow_sequence_end
+ @tokens = token = @scanner.get_token
+ SequenceEndEvent.new(token.start_mark, token.end_mark)
+ end
+
+ def flow_internal_mapping_start
+ @tokens = token = @scanner.get_token
+ MappingStartEvent.new(nil,nil,true,token.start_mark, token.end_mark,true)
+ end
+
+ def flow_internal_mapping_end
+ token = peek_token
+ MappingEndEvent.new(token.start_mark, token.start_mark)
+ end
+
+ def flow_mapping_start
+ end_mark = @scanner.peek_token.end_mark
+ implicit = @tags.last.nil? || @tags.last == ?!
+ @tokens = token = @scanner.get_token
+ MappingStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,true)
+ end
+
+ def flow_mapping_end
+ @tokens = token = @scanner.get_token
+ MappingEndEvent.new(token.start_mark, token.end_mark)
+ end
+
+ def scalar
+ token = @scanner.get_token
+ end_mark = token.end_mark
+ if (token.plain && @tags.last.nil?) || @tags.last == ?!
+ implicit = [true, false]
+ elsif @tags.last.nil?
+ implicit = [false, true]
+ else
+ implicit = [false, false]
  end
- value
+ ScalarEvent.new(@anchors.last, @tags.last, implicit, token.value, @start_marks.last, end_mark, token.style)
  end

- def parse_block_node
- parse_node(true)
+ def empty_scalar
+ process_empty_scalar(@tokens.end_mark)
  end

- def parse_flow_node
- parse_node
+
+ # PRODUCTIONS
+ def stream
+ @parse_stack += [:stream_end, :explicit_document, :implicit_document]
+ stream_start
  end
-
- def parse_block_node_or_indentless_sequence
- parse_node(true, true)
- end
-
- def parse_node(block=false, indentless_sequence=false)
- # block_node ::= ALIAS | properties? block_content
- # flow_node ::= ALIAS | properties? flow_content
- # properties ::= TAG ANCHOR? | ANCHOR TAG?
- # block_content ::= block_collection | flow_collection | SCALAR
- # flow_content ::= flow_collection | SCALAR
- # block_collection ::= block_sequence | block_mapping
- # block_node_or_indentless_sequence ::= ALIAS | properties?
- # (block_content | indentless_block_sequence)
- events = []
- if check_token(AliasToken)
- token = get_token
- events << AliasEvent.new(token.value, token.start_mark, token.end_mark)
+
+ def implicit_document
+ curr = @scanner.peek_token
+ unless curr.__is_directive || curr.__is_document_start || curr.__is_stream_end
+ @parse_stack += [:document_end, :block_node]
+ return document_start_implicit
+ end
+ nil
+ end
+
+ def explicit_document
+ if !@scanner.peek_token.__is_stream_end
+ @parse_stack += [:explicit_document, :document_end, :block_node]
+ return document_start
+ end
+ nil
+ end
+
+ def block_node
+ curr = @scanner.peek_token
+ if curr.__is_directive || curr.__is_document_start || curr.__is_document_end || curr.__is_stream_end
+ return empty_scalar
  else
- anchor = nil
- tag = nil
- start_mark = end_mark = tag_mark = nil
- if check_token(AnchorToken)
- token = get_token
- start_mark = token.start_mark
- end_mark = token.end_mark
- anchor = token.value
- if check_token(TagToken)
- token = get_token
- tag_mark = token.start_mark
- end_mark = token.end_mark
- tag = token.value
- end
- elsif check_token(TagToken)
- token = get_token
- start_mark = tag_mark = token.start_mark
+ if curr.__is_alias
+ return _alias
+ else
+ @parse_stack << :un_properties
+ properties
+ return block_content
+ end
+ end
+ end
+
+ def flow_node
+ if @scanner.peek_token.__is_alias
+ return _alias
+ else
+ @parse_stack << :un_properties
+ properties
+ return flow_content
+ end
+ end
+
+ def properties
+ anchor = nil
+ tag = nil
+ start_mark = end_mark = tag_mark = nil
+ if @scanner.peek_token.__is_anchor
+ token = @scanner.get_token
+ start_mark = token.start_mark
+ end_mark = token.end_mark
+ anchor = token.value
+ if @scanner.peek_token.__is_tag
+ token = @scanner.get_token
+ tag_mark = token.start_mark
  end_mark = token.end_mark
  tag = token.value
- if check_token(AnchorToken)
- token = get_token
- end_mark = token.end_mark
- anchor = token.value
- end
  end
+ elsif @scanner.peek_token.__is_tag
+ token = @scanner.get_token
+ start_mark = tag_mark = token.start_mark
+ end_mark = token.end_mark
+ tag = token.value
+ if @scanner.peek_token.__is_anchor
+ token = @scanner.get_token
+ end_mark = token.end_mark
+ anchor = token.value
+ end
+ end
+
+ if !tag.nil? and tag != "!"
+ handle, suffix = tag
+ if !handle.nil?
+ raise ParserError.new("while parsing a node", start_mark,"found undefined tag handle #{handle}",tag_mark) if !@tag_handles.include?(handle)
+ tag = @tag_handles[handle]+suffix
+ else
+ tag = suffix
+ end
+ end
+ if start_mark.nil?
+ start_mark = end_mark = @scanner.peek_token.start_mark
+ end
+ @anchors << anchor
+ @tags << tag
+ @start_marks << start_mark
+ @end_marks << end_mark
+ nil
+ end
+
+ def un_properties
+ @anchors.pop
+ @tags.pop
+ @start_marks.pop
+ @end_marks.pop
+ nil
+ end
+
+ def block_content
+ token = @scanner.peek_token
+ if token.__is_block_sequence_start
+ return block_sequence
+ elsif token.__is_block_mapping_start
+ return block_mapping
+ elsif token.__is_flow_sequence_start
+ return flow_sequence
+ elsif token.__is_flow_mapping_start
+ return flow_mapping
+ elsif token.__is_scalar
+ return scalar
+ else
+ raise ParserError.new("while scanning a node", @start_marks.last,"expected the node content, but found #{token.tid}",token.start_mark)
+ end
+ end

- if !tag.nil? and tag != "!"
- handle, suffix = tag
- if !handle.nil?
- raise ParserError.new("while parsing a node", start_mark,"found undefined tag handle #{handle}",tag_mark) if !@tag_handles.include?(handle)
- tag = @tag_handles[handle]+suffix
+ def flow_content
+ token = @scanner.peek_token
+ if token.__is_flow_sequence_start
+ return flow_sequence
+ elsif token.__is_flow_mapping_start
+ return flow_mapping
+ elsif token.__is_scalar
+ return scalar
+ else
+ raise ParserError.new("while scanning a flow node", @start_marks.last,"expected the node content, but found #{token.tid}",token.start_mark)
+ end
+ end
+
+ def block_sequence_entry
+ if @scanner.peek_token.__is_block_entry
+ @tokens = token = @scanner.get_token
+ if !(@scanner.peek_token.__is_block_entry || @scanner.peek_token.__is_block_end)
+ @parse_stack += [:block_sequence_entry]
+ return block_node
+ else
+ @parse_steck += [:block_sequence_entry]
+ return empty_scalar
+ end
+ end
+ nil
+ end
+
+ def block_mapping_entry
+ # ((KEY block_node_or_indentless_sequence?)? (VALUE block_node_or_indentless_sequence?)?)*
+ if @scanner.peek_token.__is_key || @scanner.peek_token.__is_value
+ if @scanner.check_token(KeyToken)
+ @tokens = token = @scanner.get_token
+ curr = @scanner.peek_token
+ if !(curr.__is_key || curr.__is_value || curr.__is_block_end)
+ @parse_stack += [:block_mapping_entry,:block_mapping_entry_value]
+ return block_node_or_indentless_sequence
  else
- tag = suffix
+ @parse_stack += [:block_mapping_entry,:block_mapping_entry_value]
+ return empty_scalar
  end
+ else
+ @parse_stack += [:block_mapping_entry,:block_mapping_entry_value]
+ return empty_scalar
  end
+ end
+ nil
+ end

- #if tag == u'!':
- # raise ParserError("while parsing a node", start_mark,
- # "found non-specific tag '!'", tag_mark,
- # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
- if start_mark.nil?
- start_mark = end_mark = peek_token.start_mark
- end
- event = nil
- collection_events = nil
- implicit = tag.nil? || tag == ?!
- if indentless_sequence && check_token(BlockEntryToken)
- end_mark = peek_token.end_mark
- event = SequenceStartEvent.new(anchor, tag, implicit, start_mark, end_mark)
- collection_events = parse_indentless_sequence
- else
- if check_token(ScalarToken)
- token = get_token
- end_mark = token.end_mark
- if (token.plain && tag.nil?) || tag == ?!
- implicit = [true, false]
- elsif tag.nil?
- implicit = [false, true]
- else
- implicit = [false, false]
- end
- event = ScalarEvent.new(anchor, tag, implicit, token.value,start_mark, end_mark,token.style)
- elsif check_token(FlowSequenceStartToken)
- end_mark = peek_token.end_mark
- event = SequenceStartEvent.new(anchor, tag, implicit, start_mark, end_mark,true)
- collection_events = parse_flow_sequence
- elsif check_token(FlowMappingStartToken)
- end_mark = peek_token.end_mark
- event = MappingStartEvent.new(anchor, tag, implicit, start_mark, end_mark,true)
- collection_events = parse_flow_mapping
- elsif block && check_token(BlockSequenceStartToken)
- end_mark = peek_token.start_mark
- event = SequenceStartEvent.new(anchor, tag, implicit, start_mark, end_mark,false)
- collection_events = parse_block_sequence
- elsif block && check_token(BlockMappingStartToken)
- end_mark = peek_token.start_mark
- event = MappingStartEvent.new(anchor, tag, implicit, start_mark, end_mark,false)
- collection_events = parse_block_mapping
- elsif !anchor.nil? || !tag.nil?
- # Empty scalars are allowed even if a tag or an anchor is
- # specified.
- event = ScalarEvent.new(anchor, tag, [implicit,false],"",start_mark, end_mark)
+ def block_mapping_entry_value
+ if @scanner.peek_token.__is_key || @scanner.peek_token.__is_value
+ if @scanner.peek_token.__is_value
+ @tokens = token = @scanner.get_token
+ curr = @scanner.peek_token
+ if !(curr.__is_key || curr.__is_value || curr.__is_block_end)
+ return block_node_or_indentless_sequence
  else
- if block
- node = "block"
- else
- node = "flow"
- end
- token = peek_token
- raise ParserError.new("while scanning a #{node} node", start_mark,"expected the node content, but found #{token.tid}",token.start_mark)
+ return empty_scalar
  end
+ else
+ @tokens = token = @scanner.peek_token
+ return empty_scalar
  end
- events << event
- events += collection_events if collection_events
  end
- events
+ nil
  end

- def parse_block_sequence
- # BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
- events = []
- token = get_token
- start_mark = token.start_mark
- while check_token(BlockEntryToken)
- token = get_token
- if !check_token(BlockEntryToken, BlockEndToken)
- events += parse_block_node
+ def block_sequence
+ @parse_stack += [:block_sequence_end,:block_sequence_entry]
+ block_sequence_start
+ end
+
+ def block_mapping
+ @parse_stack += [:block_mapping_end,:block_mapping_entry]
+ block_mapping_start
+ end
+
+ def block_node_or_indentless_sequence
+ if @scanner.peek_token.__is_alias
+ return _alias
+ else
+ if @scanner.peek_token.__is_block_entry
+ properties
+ return indentless_block_sequence
  else
- events << process_empty_scalar(token.end_mark)
+ properties
+ return block_content
  end
  end
- if !check_token(BlockEndToken)
- token = peek_token
- raise ParserError.new("while scanning a block collection", start_mark,"expected <block end>, but found #{token.tid}", token.start_mark)
- end
- token = get_token
- events << SequenceEndEvent.new(token.start_mark, token.end_mark)
- events
- end
-
- def parse_indentless_sequence
- # (BLOCK-ENTRY block_node?)+
- events = []
- while check_token(BlockEntryToken)
- token = get_token
- if !check_token(BlockEntryToken,KeyToken, ValueToken, BlockEndToken)
- events += parse_block_node
+ end
+
+ def indentless_block_sequence
+ @parse_stack += [:block_indentless_sequence_end,:indentless_block_sequence_entry]
+ block_indentless_sequence_start
+ end
+
+ def indentless_block_sequence_entry
+ if @scanner.peek_token.__is_block_entry
+ @tokens = @scanner.get_token
+ curr = @scanner.peek_token
+ if !(curr.__is_block_entry || curr.__is_key || curr.__is_value || curr.__is_block_end)
+ @parse_stack << :indentless_block_sequence_entry
+ return block_node
  else
- events << process_empty_scalar(token.end_mark)
+ @parse_stack << :indentless_block_sequence_entry
+ return empty_scalar
  end
  end
- token = peek_token
- events << SequenceEndEvent.new(token.start_mark, token.start_mark)
- events
+ nil
  end

+ def flow_sequence
+ @parse_stack += [:flow_sequence_end,:flow_sequence_entry]
+ flow_sequence_start
+ end

- def parse_block_mapping
- # BLOCK-MAPPING_START
- # ((KEY block_node_or_indentless_sequence?)?
- # (VALUE block_node_or_indentless_sequence?)?)*
- # BLOCK-END
- events = []
- token = get_token
- start_mark = token.start_mark
- while check_token(KeyToken, ValueToken)
- if check_token(KeyToken)
- token = get_token
- if !check_token(KeyToken, ValueToken, BlockEndToken)
- events += parse_block_node_or_indentless_sequence
- else
- events << process_empty_scalar(token.end_mark)
- end
- end
- if check_token(ValueToken)
- token = get_token
- if !check_token(KeyToken, ValueToken, BlockEndToken)
- events += parse_block_node_or_indentless_sequence
- else
- events << process_empty_scalar(token.end_mark)
- end
+ def flow_mapping
+ @parse_stack += [:flow_mapping_end,:flow_mapping_entry]
+ flow_mapping_start
+ end
+
+ def flow_sequence_entry
+ if !@scanner.peek_token.__is_flow_sequence_end
+ if @scanner.peek_token.__is_key
+ @parse_stack += [:flow_sequence_entry,:flow_entry_marker,:flow_internal_mapping_end,:flow_internal_value,:flow_internal_content]
+ return flow_internal_mapping_start
  else
- token = peek_token
- events << process_empty_scalar(token.start_mark)
+ @parse_stack += [:flow_sequence_entry,:flow_node]
+ return flow_entry_marker
  end
  end
- if !check_token(BlockEndToken)
- token = peek_token
- raise ParserError.new("while scanning a block mapping", start_mark,"expected <block end>, but found #{token.tid}", token.start_mark)
+ nil
+ end
+
+ def flow_internal_content
+ token = @scanner.peek_token
+ if !(token.__is_value || token.__is_flow_entry || token.__is_flow_sequence_end)
+ flow_node
+ else
+ empty_scalar
  end
- token = get_token
- events << MappingEndEvent.new(token.start_mark, token.end_mark)
- events
- end
-
- def parse_flow_sequence
- # flow_sequence ::= FLOW-SEQUENCE-START
- # (flow_sequence_entry FLOW-ENTRY)*
- # flow_sequence_entry?
- # FLOW-SEQUENCE-END
- # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
- #
- # Note that while production rules for both flow_sequence_entry and
- # flow_mapping_entry are equal, their interpretations are different.
- # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
- # generate an inline mapping (set syntax).
- events = []
- token = get_token
- start_mark = token.start_mark
- while !check_token(FlowSequenceEndToken)
- if check_token(KeyToken)
- token = get_token
- events << MappingStartEvent.new(nil,nil,true,token.start_mark, token.end_mark,true)
- if !check_token(ValueToken,FlowEntryToken, FlowSequenceEndToken)
- events += parse_flow_node
- else
- events << process_empty_scalar(token.end_mark)
- end
- if check_token(ValueToken)
- token = get_token
- if !check_token(FlowEntryToken, FlowSequenceEndToken)
- events += parse_flow_node
- else
- events << process_empty_scalar(token.end_mark)
- end
- else
- token = peek_token
- events << process_empty_scalar(token.start_mark)
- end
- token = peek_token
- events << MappingEndEvent.new(token.start_mark, token.start_mark)
+ end
+
+ def flow_internal_value
+ if @scanner.peek_token.__is_value
+ @tokens = token = @scanner.get_token
+ if !(@scanner.peek_token.__is_flow_entry || @scanner.peek_token.__is_flow_sequence_end)
+ flow_node
  else
- events += parse_flow_node
- end
- if !check_token(FlowEntryToken, FlowSequenceEndToken)
- token = peek_token
- raise ParserError.new("while scanning a flow sequence", start_mark,"expected ',' or ']', but got #{token.tid}", token.start_mark)
+ empty_scalar
  end
- if check_token(FlowEntryToken)
- get_token
+ else
+ @tokens = token = @scanner.peek_token
+ empty_scalar
+ end
+ end
+
+ def flow_entry_marker
+ if @scanner.peek_token.__is_flow_entry
+ @scanner.get_token
+ end
+ nil
+ end
+
+ def flow_mapping_entry
+ if !@scanner.peek_token.__is_flow_mapping_end
+ if @scanner.peek_token.__is_key
+ @parse_stack += [:flow_mapping_entry,:flow_entry_marker,:flow_mapping_internal_value]
+ return flow_mapping_internal_content
+ else
+ @parse_stack += [:flow_mapping_entry,:flow_node]
+ return flow_entry_marker
  end
  end
- token = get_token
- events << SequenceEndEvent.new(token.start_mark, token.end_mark)
- events
+ nil
  end
-
- def parse_flow_mapping
- # flow_mapping ::= FLOW-MAPPING-START
- # (flow_mapping_entry FLOW-ENTRY)*
- # flow_mapping_entry?
- # FLOW-MAPPING-END
- # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
- events = []
- token = get_token
- start_mark = token.start_mark
- while !check_token(FlowMappingEndToken)
- if check_token(KeyToken)
- token = get_token
- if !check_token(ValueToken,FlowEntryToken, FlowMappingEndToken)
- events += parse_flow_node
- else
- events << process_empty_scalar(token.end_mark)
- end
- if check_token(ValueToken)
- token = get_token
- if !check_token(FlowEntryToken, FlowMappingEndToken)
- events += parse_flow_node
- else
- events << process_empty_scalar(token.end_mark)
- end
- else
- token = peek_token
- events << process_empty_scalar(token.start_mark)
- end
+
+ def flow_mapping_internal_content
+ curr = @scanner.peek_token
+ if !(curr.__is_value || curr.__is_flow_entry || curr.__is_flow_mapping_end)
+ @tokens = token = @scanner.get_token
+ flow_node
+ else
+ empty_scalar
+ end
+ end
+
+ def flow_mapping_internal_value
+ if @scanner.peek_token.__is_value
+ @tokens = token = @scanner.get_token
+ if !(@scanner.peek_token.__is_flow_entry || @scanner.peek_token.__is_flow_mapping_end)
+ flow_node
  else
- events += parse_flow_node
- events << process_empty_scalar(peek_token.start_mark)
+ empty_scalar
  end
- if !check_token(FlowEntryToken, FlowMappingEndToken)
- token = peek_token
- raise ParserError.new("while scanning a flow mapping", start_mark,"expected ',' or '}', but got #{token.tid}", token.start_mark)
+ else
+ @tokens = token = @scanner.peek_token
+ empty_scalar
+ end
+ end
+
+
+ def process_directives
+ # DIRECTIVE*
+ while @scanner.peek_token.__is_directive
+ token = @scanner.get_token
+ if token.name == "YAML"
+ raise ParserError.new(nil, nil,"found duplicate YAML directive", token.start_mark) if !@yaml_version.nil?
+ major, minor = token.value[0].to_i, token.value[1].to_i
+ raise ParserError.new(nil,nil,"found incompatible YAML document (version 1.* is required)",token.start_mark) if major != 1
+ @yaml_version = [major,minor]
+ elsif token.name == "TAG"
+ handle, prefix = token.value
+ raise ParserError.new(nil,nil,"duplicate tag handle #{handle}",token.start_mark) if @tag_handles.member?(handle)
+ @tag_handles[handle] = prefix
  end
- get_token if check_token(FlowEntryToken)
  end
- if !check_token(FlowMappingEndToken)
- token = peek_token
- raise ParserError.new("while scanning a flow mapping", start_mark,"expected '}', but found #{token.tid}", token.start_mark)
+ if !@tag_handles.empty?
+ value = @yaml_version, @tag_handles.dup
+ else
+ value = @yaml_version, nil
+ end
+ for key in DEFAULT_TAGS.keys
+ @tag_handles[key] = DEFAULT_TAGS[key] if !@tag_handles.include?(key)
  end
- token = get_token
- events << MappingEndEvent.new(token.start_mark, token.end_mark)
- events
+ value
  end

  def process_empty_scalar(mark)