RbYAML 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/lib/rbyaml.rb +14 -256
- data/lib/rbyaml.rb.~1.2.~ +383 -0
- data/lib/rbyaml/composer.rb +9 -11
- data/lib/rbyaml/{composer.rb.~1.2.~ → composer.rb.~1.3.~} +28 -25
- data/lib/rbyaml/constants.rb +95 -0
- data/lib/rbyaml/constructor.rb +180 -89
- data/lib/rbyaml/{constructor.rb.~1.2.~ → constructor.rb.~1.9.~} +137 -95
- data/lib/rbyaml/dumper.rb +12 -9
- data/lib/rbyaml/dumper.rb.~1.3.~ +36 -0
- data/lib/rbyaml/emitter.rb +14 -28
- data/lib/rbyaml/{emitter.rb.~1.2.~ → emitter.rb.~1.6.~} +22 -33
- data/lib/rbyaml/error.rb +4 -57
- data/lib/rbyaml/error.rb.~1.2.~ +75 -0
- data/lib/rbyaml/events.rb +8 -14
- data/lib/rbyaml/{events.rb.~1.2.~ → events.rb.~1.4.~} +29 -6
- data/lib/rbyaml/nodes.rb +5 -5
- data/lib/rbyaml/{nodes.rb.~1.2.~ → nodes.rb.~1.3.~} +13 -9
- data/lib/rbyaml/parser.rb +70 -108
- data/lib/rbyaml/parser.rb.~1.4.~ +632 -0
- data/lib/rbyaml/representer.rb +19 -157
- data/lib/rbyaml/representer.rb.old +317 -0
- data/lib/rbyaml/{representer.rb.~1.2.~ → representer.rb.~1.5.~} +60 -26
- data/lib/rbyaml/resolver.rb +6 -6
- data/lib/rbyaml/{resolver.rb.~1.1.~ → resolver.rb.~1.6.~} +20 -20
- data/lib/rbyaml/rubytypes.rb +391 -0
- data/lib/rbyaml/scanner.rb +123 -225
- data/lib/rbyaml/{scanner.rb.~1.2.~ → scanner.rb.~1.5.~} +466 -378
- data/lib/rbyaml/serializer.rb +9 -9
- data/lib/rbyaml/{serializer.rb.~1.2.~ → serializer.rb.~1.4.~} +19 -17
- data/lib/rbyaml/stream.rb +48 -0
- data/lib/rbyaml/tag.rb +72 -0
- data/lib/rbyaml/tokens.rb +22 -16
- data/lib/rbyaml/{tokens.rb.~1.2.~ → tokens.rb.~1.3.~} +44 -4
- data/lib/rbyaml/types.rb +146 -0
- data/lib/rbyaml/util.rb.~1.3.~ +38 -0
- data/lib/rbyaml/yaml.rb +22 -32
- data/lib/rbyaml/{yaml.rb.~1.2.~ → yaml.rb.~1.5.~} +17 -17
- data/test/load_one.rb +6 -0
- data/test/load_one_yaml.rb +6 -0
- data/test/output_events.rb +9 -0
- data/test/test_add_ctor.rb +51 -0
- data/test/test_add_ctor.rb.~1.1.~ +30 -0
- data/test/test_bm.rb +2 -2
- data/test/test_bm.rb.~1.1.~ +28 -0
- data/test/test_gems.rb +10 -0
- data/test/test_one.rb.~1.1.~ +5 -0
- data/test/test_one_syck.rb +5 -0
- data/test/test_rbyaml.rb +63 -32
- data/test/test_rbyaml.rb.~1.6.~ +59 -0
- data/test/{test_rbyaml.rb.~1.2.~ → test_rbyaml_old.rb} +13 -4
- data/test/test_time_events.rb +24 -0
- data/test/test_time_nodes.rb +24 -0
- data/test/test_time_tokens.rb +24 -0
- data/test/yaml/gems_new.yml +147456 -0
- data/test/yaml/test1.rb +8 -0
- data/test/yaml/test10.rb +14 -0
- data/test/yaml/test11.rb +13 -0
- data/test/yaml/test12.rb +9 -0
- data/test/yaml/test13.rb +9 -0
- data/test/yaml/test14.rb +13 -0
- data/test/yaml/test15.rb +12 -0
- data/test/yaml/test16.rb +11 -0
- data/test/yaml/test16.rb.~1.1.~ +11 -0
- data/test/yaml/test17.rb +10 -0
- data/test/yaml/test18.rb +13 -0
- data/test/yaml/test19.rb +9 -0
- data/test/yaml/test19.yml +1 -1
- data/test/yaml/test2.rb +8 -0
- data/test/yaml/test20.rb +11 -0
- data/test/yaml/test20.rb.~1.1.~ +9 -0
- data/test/yaml/test20.yml +1 -1
- data/test/yaml/test3.rb +13 -0
- data/test/yaml/test4.rb +13 -0
- data/test/yaml/test5.rb +8 -0
- data/test/yaml/test6.rb +10 -0
- data/test/yaml/test7.rb +15 -0
- data/test/yaml/test8.rb +15 -0
- data/test/yaml/test9.rb +13 -0
- metadata +61 -16
- data/lib/rbyaml/dumper.rb.~1.2.~ +0 -43
- data/lib/rbyaml/parser.rb.~1.2.~ +0 -494
# YAML can be parsed by an LL(1) parser!
#
# We use the following production rules:
# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END?
# implicit_document ::= block_node DOCUMENT-END?
# block_node ::= ALIAS | properties? block_content
# flow_node ::= ALIAS | properties? flow_content
# properties ::= TAG ANCHOR? | ANCHOR TAG?
# block_content ::= block_collection | flow_collection | SCALAR
# flow_content ::= flow_collection | SCALAR
# block_collection ::= block_sequence | block_mapping
# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
# block_mapping ::= BLOCK-MAPPING_START ((KEY block_node_or_indentless_sequence?)? (VALUE block_node_or_indentless_sequence?)?)* BLOCK-END
# block_node_or_indentless_sequence ::= ALIAS | properties? (block_content | indentless_block_sequence)
# indentless_block_sequence ::= (BLOCK-ENTRY block_node?)+
# flow_collection ::= flow_sequence | flow_mapping
# flow_sequence ::= FLOW-SEQUENCE-START (flow_sequence_entry FLOW-ENTRY)* flow_sequence_entry? FLOW-SEQUENCE-END
# flow_mapping ::= FLOW-MAPPING-START (flow_mapping_entry FLOW-ENTRY)* flow_mapping_entry? FLOW-MAPPING-END
# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
#
# TODO: support for BOM within a stream.
# stream ::= (BOM? implicit_document)? (BOM? explicit_document)* STREAM-END
#
# FIRST sets:
# stream: { STREAM-START }
# explicit_document: { DIRECTIVE DOCUMENT-START }
# implicit_document: FIRST(block_node)
# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# block_sequence: { BLOCK-SEQUENCE-START }
# block_mapping: { BLOCK-MAPPING-START }
# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
# indentless_sequence: { ENTRY }
# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
# flow_sequence: { FLOW-SEQUENCE-START }
# flow_mapping: { FLOW-MAPPING-START }
# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }

require 'rbyaml/error'
require 'rbyaml/tokens'
require 'rbyaml/events'
require 'rbyaml/scanner'

module RbYAML
  class ParserError < MarkedYAMLError
  end

  # Turns the Scanner's token stream into a stream of Events.
  #
  # The parser is driven by an explicit stack of production symbols
  # (@parse_stack): parse_stream_next pops a symbol, sends it as a method
  # call, and keeps going until some production returns an Event.
  # Productions that need to parse sub-constructs later push the follow-up
  # symbols onto the stack (LIFO: the last symbol pushed runs first).
  class Parser
    # Tag shorthands that are always defined, per the YAML 1.0/1.1 spec.
    DEFAULT_TAGS = {
      '!' => '!',
      '!!' => 'tag:yaml.org,2002:'
    }

    # scanner must respond to peek_token/get_token (and check_token).
    def initialize(scanner)
      @scanner = scanner
      @current_event = nil
      @yaml_version = nil
      @events = nil
      @working_events = nil
      @tag_handles = { }
      @parse_stack = nil
      # NOTE(review): @start_mark and @tks look unused in this file; the
      # per-node marks live in @start_marks/@end_marks (set in parse_stream).
      @start_mark = []
      @tks = []
    end

    # True if there is a next event and — when choices are given — it
    # matches one of them via === (so event classes work as choices).
    # With no choices, any pending event yields true.
    def check_event(*choices)
      parse_stream
      @current_event = parse_stream_next if @current_event.nil?
      if @current_event
        return true if choices.empty?
        return choices.any? { |choice| choice === @current_event }
      end
      false
    end

    # Returns the next event without consuming it (nil at end of stream).
    def peek_event
      parse_stream
      @current_event = parse_stream_next unless @current_event
      @current_event
    end

    # Returns the next event and consumes it (nil at end of stream).
    def get_event
      parse_stream
      @current_event = parse_stream_next unless @current_event
      value = @current_event
      @current_event = nil
      value
    end

    # Yields every remaining event in order.
    def each_event
      parse_stream
      while @current_event = parse_stream_next
        yield @current_event
      end
    end

    # Lazily initializes the parse state; the stack starts with the
    # top-level :stream production.
    def parse_stream
      if !@parse_stack
        @parse_stack = [:stream]
        @tokens = nil
        @tags = []
        @anchors = []
        @start_marks = []
        @end_marks = []
      end
    end

    # Pops productions off the stack until one of them produces an event.
    # Once the stack is exhausted, resets the state and returns nil.
    def parse_stream_next
      if !@parse_stack.empty?
        while true
          meth = @parse_stack.pop
          val = send(meth)
          return val if !val.nil?
        end
      else
        @tokens = nil
        @tags = []
        @anchors = []
        @start_marks = []
        @end_marks = []
        return nil
      end
    end

    #TERMINALS, definitions

    # STREAM-START -> StreamStartEvent (carries the encoding).
    def stream_start
      token = @scanner.get_token
      StreamStartEvent.new(token.start_mark, token.end_mark,token.encoding)
    end

    # STREAM-END -> StreamEndEvent.
    def stream_end
      token = @scanner.get_token
      StreamEndEvent.new(token.start_mark, token.end_mark)
    end

    # Start of a document with no explicit '---': zero-width event at the
    # position of the upcoming token; explicit=false.
    def document_start_implicit
      token = @scanner.peek_token
      version, tags = process_directives
      DocumentStartEvent.new(token.start_mark,token.start_mark,false,version,tags)
    end

    # DIRECTIVE* DOCUMENT-START -> DocumentStartEvent (explicit=true).
    def document_start
      token = @scanner.peek_token
      start_mark = token.start_mark
      version, tags = process_directives
      raise ParserError.new(nil, nil,"expected '<document start>', but found #{token.tid}",token.start_mark) unless @scanner.peek_token.__is_document_start
      @token = token = @scanner.get_token
      end_mark = token.end_mark
      DocumentStartEvent.new(start_mark, end_mark,true,version,tags)
    end

    # Consumes any run of DOCUMENT-END ('...') tokens; explicit is true
    # only when at least one was present.
    def document_end
      token = @scanner.peek_token
      start_mark = end_mark = token.start_mark
      explicit = false
      while @scanner.peek_token.__is_document_end
        @tokens = token = @scanner.get_token
        end_mark = token.end_mark
        explicit = true
      end
      DocumentEndEvent.new(start_mark, end_mark, explicit)
    end

    # ALIAS -> AliasEvent. Underscored because 'alias' is a Ruby keyword.
    def _alias
      token = @scanner.get_token
      AliasEvent.new(token.value, token.start_mark, token.end_mark)
    end

    # BLOCK-SEQUENCE-START; anchor/tag/start_mark come from the
    # properties pushed by the enclosing node production.
    def block_sequence_start
      end_mark = @scanner.peek_token.start_mark
      implicit = @tags.last.nil? || @tags.last == "!"
      @tokens = token = @scanner.get_token
      SequenceStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,false)
    end

    # Indentless sequences have no start token, so nothing is consumed.
    def block_indentless_sequence_start
      end_mark = @scanner.peek_token.end_mark
      implicit = @tags.last.nil? || @tags.last == "!"
      SequenceStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,false)
    end

    # BLOCK-END closing a block sequence.
    def block_sequence_end
      if !@scanner.peek_token.__is_block_end
        token = @scanner.peek_token
        raise ParserError.new("while scanning a block collection", @start_marks.last,"expected <block end>, but found #{token.tid}: #{token.inspect}", token.start_mark)
      end
      token = @scanner.get_token
      SequenceEndEvent.new(token.start_mark, token.end_mark)
    end

    # Indentless sequences also have no end token: peek only.
    def block_indentless_sequence_end
      @tokens = token = @scanner.peek_token
      SequenceEndEvent.new(token.start_mark, token.end_mark)
    end

    # BLOCK-MAPPING-START -> MappingStartEvent.
    def block_mapping_start
      end_mark = @scanner.peek_token.start_mark
      implicit = @tags.last.nil? || @tags.last == "!"
      @tokens = token = @scanner.get_token
      MappingStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,false)
    end

    # BLOCK-END closing a block mapping.
    def block_mapping_end
      if !@scanner.peek_token.__is_block_end
        token = @scanner.peek_token
        raise ParserError.new("while scanning a block mapping", @start_marks.last,"expected <block end>, but found #{token.tid}", token.start_mark)
      end
      @tokens = token = @scanner.get_token
      MappingEndEvent.new(token.start_mark, token.end_mark)
    end

    # FLOW-SEQUENCE-START ('[') -> SequenceStartEvent (flow style).
    def flow_sequence_start
      end_mark = @scanner.peek_token.end_mark
      implicit = @tags.last.nil? || @tags.last == "!"
      @tokens = token = @scanner.get_token
      SequenceStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,true)
    end

    # FLOW-SEQUENCE-END (']').
    def flow_sequence_end
      @tokens = token = @scanner.get_token
      SequenceEndEvent.new(token.start_mark, token.end_mark)
    end

    # Single-pair mapping appearing inside a flow sequence ('[a: b]');
    # consumes the KEY token.
    def flow_internal_mapping_start
      @tokens = token = @scanner.get_token
      MappingStartEvent.new(nil,nil,true,token.start_mark, token.end_mark,true)
    end

    # The internal mapping has no closing token: peek only.
    def flow_internal_mapping_end
      token = @scanner.peek_token
      MappingEndEvent.new(token.start_mark, token.start_mark)
    end

    # FLOW-MAPPING-START ('{') -> MappingStartEvent (flow style).
    def flow_mapping_start
      end_mark = @scanner.peek_token.end_mark
      implicit = @tags.last.nil? || @tags.last == "!"
      @tokens = token = @scanner.get_token
      MappingStartEvent.new(@anchors.last, @tags.last, implicit, @start_marks.last, end_mark,true)
    end

    # FLOW-MAPPING-END ('}').
    def flow_mapping_end
      @tokens = token = @scanner.get_token
      MappingEndEvent.new(token.start_mark, token.end_mark)
    end

    # SCALAR -> ScalarEvent. The implicit pair is [plain-resolvable,
    # non-plain-resolvable] for the resolver.
    def scalar
      token = @scanner.get_token
      end_mark = token.end_mark
      if (token.plain && @tags.last.nil?) || @tags.last == "!"
        implicit = [true, false]
      elsif @tags.last.nil?
        implicit = [false, true]
      else
        implicit = [false, false]
      end
      ScalarEvent.new(@anchors.last, @tags.last, implicit, token.value, @start_marks.last, end_mark, token.style)
    end

    # Zero-width scalar at the mark of the most recently touched token.
    def empty_scalar
      process_empty_scalar(@tokens.end_mark)
    end


    # PRODUCTIONS

    # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
    def stream
      @parse_stack += [:stream_end, :explicit_document, :implicit_document]
      stream_start
    end

    # Emits an implicit DocumentStartEvent only when content starts
    # without a directive/'---' (and the stream is not already over).
    def implicit_document
      curr = @scanner.peek_token
      unless curr.__is_directive || curr.__is_document_start || curr.__is_stream_end
        @parse_stack += [:document_end, :block_node]
        return document_start_implicit
      end
      nil
    end

    # Parses one explicit document and re-queues itself, so any number of
    # documents are handled before STREAM-END.
    def explicit_document
      if !@scanner.peek_token.__is_stream_end
        @parse_stack += [:explicit_document, :document_end, :block_node]
        return document_start
      end
      nil
    end

    # block_node ::= ALIAS | properties? block_content
    # (a document with no content yields an empty scalar).
    def block_node
      curr = @scanner.peek_token
      if curr.__is_directive || curr.__is_document_start || curr.__is_document_end || curr.__is_stream_end
        return empty_scalar
      else
        if curr.__is_alias
          return _alias
        else
          @parse_stack << :un_properties
          properties
          return block_content
        end
      end
    end

    # flow_node ::= ALIAS | properties? flow_content
    def flow_node
      if @scanner.peek_token.__is_alias
        return _alias
      else
        @parse_stack << :un_properties
        properties
        return flow_content
      end
    end

    # Collects the optional anchor/tag pair (in either order), resolves
    # tag handles, and pushes anchor/tag/marks for the node being built.
    # Always paired with :un_properties on the stack.
    def properties
      anchor = nil
      tag = nil
      start_mark = end_mark = tag_mark = nil
      if @scanner.peek_token.__is_anchor
        token = @scanner.get_token
        start_mark = token.start_mark
        end_mark = token.end_mark
        anchor = token.value
        if @scanner.peek_token.__is_tag
          token = @scanner.get_token
          tag_mark = token.start_mark
          end_mark = token.end_mark
          tag = token.value
        end
      elsif @scanner.peek_token.__is_tag
        token = @scanner.get_token
        start_mark = tag_mark = token.start_mark
        end_mark = token.end_mark
        tag = token.value
        if @scanner.peek_token.__is_anchor
          token = @scanner.get_token
          end_mark = token.end_mark
          anchor = token.value
        end
      end

      # A tag token's value is a [handle, suffix] pair; expand the handle
      # through the directive table ('!' alone means non-specific).
      if !tag.nil? and tag != "!"
        handle, suffix = tag
        if !handle.nil?
          raise ParserError.new("while parsing a node", start_mark,"found undefined tag handle #{handle}",tag_mark) if !@tag_handles.include?(handle)
          tag = @tag_handles[handle]+suffix
        else
          tag = suffix
        end
      end
      if start_mark.nil?
        start_mark = end_mark = @scanner.peek_token.start_mark
      end
      @anchors << anchor
      @tags << tag
      @start_marks << start_mark
      @end_marks << end_mark
      nil
    end

    # Pops the state pushed by properties once the node is finished.
    def un_properties
      @anchors.pop
      @tags.pop
      @start_marks.pop
      @end_marks.pop
      nil
    end

    # block_content ::= block_collection | flow_collection | SCALAR
    def block_content
      token = @scanner.peek_token
      if token.__is_block_sequence_start
        return block_sequence
      elsif token.__is_block_mapping_start
        return block_mapping
      elsif token.__is_flow_sequence_start
        return flow_sequence
      elsif token.__is_flow_mapping_start
        return flow_mapping
      elsif token.__is_scalar
        return scalar
      else
        raise ParserError.new("while scanning a node", @start_marks.last,"expected the node content, but found #{token.tid}",token.start_mark)
      end
    end

    # flow_content ::= flow_collection | SCALAR
    def flow_content
      token = @scanner.peek_token
      if token.__is_flow_sequence_start
        return flow_sequence
      elsif token.__is_flow_mapping_start
        return flow_mapping
      elsif token.__is_scalar
        return scalar
      else
        raise ParserError.new("while scanning a flow node", @start_marks.last,"expected the node content, but found #{token.tid}",token.start_mark)
      end
    end

    # One '- value' entry of a block sequence; re-queues itself until
    # the entries run out. An entry with no node becomes an empty scalar.
    def block_sequence_entry
      if @scanner.peek_token.__is_block_entry
        @tokens = token = @scanner.get_token
        if !(@scanner.peek_token.__is_block_entry || @scanner.peek_token.__is_block_end)
          @parse_stack += [:block_sequence_entry]
          return block_node
        else
          # BUGFIX: was '@parse_steck' (typo). @parse_steck is never
          # initialized, so this branch — reached for an empty entry,
          # e.g. "- \n- x" — raised NoMethodError on nil instead of
          # continuing with the remaining entries.
          @parse_stack += [:block_sequence_entry]
          return empty_scalar
        end
      end
      nil
    end

    # ((KEY block_node_or_indentless_sequence?)? (VALUE block_node_or_indentless_sequence?)?)*
    def block_mapping_entry
      if @scanner.peek_token.__is_key || @scanner.peek_token.__is_value
        # (was @scanner.check_token(KeyToken); normalized to the __is_key
        # predicate used everywhere else in this file)
        if @scanner.peek_token.__is_key
          @tokens = token = @scanner.get_token
          curr = @scanner.peek_token
          if !(curr.__is_key || curr.__is_value || curr.__is_block_end)
            @parse_stack += [:block_mapping_entry,:block_mapping_entry_value]
            return block_node_or_indentless_sequence
          else
            @parse_stack += [:block_mapping_entry,:block_mapping_entry_value]
            return empty_scalar
          end
        else
          # VALUE with no preceding KEY: the key is an empty scalar.
          @parse_stack += [:block_mapping_entry,:block_mapping_entry_value]
          return empty_scalar
        end
      end
      nil
    end

    # The value half of a block mapping entry; a missing VALUE token or
    # an empty value becomes an empty scalar.
    def block_mapping_entry_value
      if @scanner.peek_token.__is_key || @scanner.peek_token.__is_value
        if @scanner.peek_token.__is_value
          @tokens = token = @scanner.get_token
          curr = @scanner.peek_token
          if !(curr.__is_key || curr.__is_value || curr.__is_block_end)
            return block_node_or_indentless_sequence
          else
            return empty_scalar
          end
        else
          @tokens = token = @scanner.peek_token
          return empty_scalar
        end
      end
      nil
    end

    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
    def block_sequence
      @parse_stack += [:block_sequence_end,:block_sequence_entry]
      block_sequence_start
    end

    # block_mapping ::= BLOCK-MAPPING-START (entries)* BLOCK-END
    def block_mapping
      @parse_stack += [:block_mapping_end,:block_mapping_entry]
      block_mapping_start
    end

    # block_node_or_indentless_sequence ::= ALIAS | properties? (block_content | indentless_block_sequence)
    # NOTE(review): unlike block_node/flow_node, no :un_properties is
    # pushed here — preserved as-is from the original.
    def block_node_or_indentless_sequence
      if @scanner.peek_token.__is_alias
        return _alias
      else
        if @scanner.peek_token.__is_block_entry
          properties
          return indentless_block_sequence
        else
          properties
          return block_content
        end
      end
    end

    # indentless_block_sequence ::= (BLOCK-ENTRY block_node?)+
    def indentless_block_sequence
      @parse_stack += [:block_indentless_sequence_end,:indentless_block_sequence_entry]
      block_indentless_sequence_start
    end

    # One '- value' entry of an indentless sequence; KEY/VALUE/BLOCK-END
    # also terminate an entry because there is no dedicated end token.
    def indentless_block_sequence_entry
      if @scanner.peek_token.__is_block_entry
        @tokens = @scanner.get_token
        curr = @scanner.peek_token
        if !(curr.__is_block_entry || curr.__is_key || curr.__is_value || curr.__is_block_end)
          @parse_stack << :indentless_block_sequence_entry
          return block_node
        else
          @parse_stack << :indentless_block_sequence_entry
          return empty_scalar
        end
      end
      nil
    end

    # flow_sequence ::= FLOW-SEQUENCE-START (entry FLOW-ENTRY)* entry? FLOW-SEQUENCE-END
    def flow_sequence
      @parse_stack += [:flow_sequence_end,:flow_sequence_entry]
      flow_sequence_start
    end

    # flow_mapping ::= FLOW-MAPPING-START (entry FLOW-ENTRY)* entry? FLOW-MAPPING-END
    def flow_mapping
      @parse_stack += [:flow_mapping_end,:flow_mapping_entry]
      flow_mapping_start
    end

    # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    # A KEY inside a sequence opens a single-pair internal mapping.
    def flow_sequence_entry
      if !@scanner.peek_token.__is_flow_sequence_end
        if @scanner.peek_token.__is_key
          @parse_stack += [:flow_sequence_entry,:flow_entry_marker,:flow_internal_mapping_end,:flow_internal_value,:flow_internal_content]
          return flow_internal_mapping_start
        else
          @parse_stack += [:flow_sequence_entry,:flow_node]
          return flow_entry_marker
        end
      end
      nil
    end

    # Key node of an internal mapping (empty scalar when absent).
    def flow_internal_content
      token = @scanner.peek_token
      if !(token.__is_value || token.__is_flow_entry || token.__is_flow_sequence_end)
        flow_node
      else
        empty_scalar
      end
    end

    # Value node of an internal mapping (empty scalar when absent).
    def flow_internal_value
      if @scanner.peek_token.__is_value
        @tokens = token = @scanner.get_token
        if !(@scanner.peek_token.__is_flow_entry || @scanner.peek_token.__is_flow_sequence_end)
          flow_node
        else
          empty_scalar
        end
      else
        @tokens = token = @scanner.peek_token
        empty_scalar
      end
    end

    # Swallows an optional ',' between flow entries; emits no event.
    def flow_entry_marker
      if @scanner.peek_token.__is_flow_entry
        @scanner.get_token
      end
      nil
    end

    # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    def flow_mapping_entry
      if !@scanner.peek_token.__is_flow_mapping_end
        if @scanner.peek_token.__is_key
          @parse_stack += [:flow_mapping_entry,:flow_entry_marker,:flow_mapping_internal_value]
          return flow_mapping_internal_content
        else
          @parse_stack += [:flow_mapping_entry,:flow_node]
          return flow_entry_marker
        end
      end
      nil
    end

    # Key node of a flow mapping entry; consumes the KEY token first.
    def flow_mapping_internal_content
      curr = @scanner.peek_token
      if !(curr.__is_value || curr.__is_flow_entry || curr.__is_flow_mapping_end)
        @tokens = token = @scanner.get_token
        flow_node
      else
        empty_scalar
      end
    end

    # Value node of a flow mapping entry (empty scalar when absent).
    def flow_mapping_internal_value
      if @scanner.peek_token.__is_value
        @tokens = token = @scanner.get_token
        if !(@scanner.peek_token.__is_flow_entry || @scanner.peek_token.__is_flow_mapping_end)
          flow_node
        else
          empty_scalar
        end
      else
        @tokens = token = @scanner.peek_token
        empty_scalar
      end
    end


    # Consumes leading %YAML/%TAG directives. Returns [version, tags]
    # where tags is a copy of the handle table, or nil when no %TAG
    # directive appeared; afterwards DEFAULT_TAGS are merged in.
    def process_directives
      # DIRECTIVE*
      while @scanner.peek_token.__is_directive
        token = @scanner.get_token
        if token.name == "YAML"
          raise ParserError.new(nil, nil,"found duplicate YAML directive", token.start_mark) if !@yaml_version.nil?
          major, minor = token.value[0].to_i, token.value[1].to_i
          raise ParserError.new(nil,nil,"found incompatible YAML document (version 1.* is required)",token.start_mark) if major != 1
          @yaml_version = [major,minor]
        elsif token.name == "TAG"
          handle, prefix = token.value
          raise ParserError.new(nil,nil,"duplicate tag handle #{handle}",token.start_mark) if @tag_handles.member?(handle)
          @tag_handles[handle] = prefix
        end
      end
      if !@tag_handles.empty?
        value = @yaml_version, @tag_handles.dup
      else
        value = @yaml_version, nil
      end
      for key in DEFAULT_TAGS.keys
        @tag_handles[key] = DEFAULT_TAGS[key] if !@tag_handles.include?(key)
      end
      value
    end

    # A zero-width plain scalar ("") anchored at the given mark.
    def process_empty_scalar(mark)
      ScalarEvent.new(nil, nil, [true, false], "", mark, mark)
    end
  end
end