outerbounds 0.3.55rc3__py3-none-any.whl → 0.3.133__py3-none-any.whl

Sign up to get free protection for your applications and to get access to all the features.
Files changed (56) hide show
  1. outerbounds/_vendor/PyYAML.LICENSE +20 -0
  2. outerbounds/_vendor/__init__.py +0 -0
  3. outerbounds/_vendor/_yaml/__init__.py +34 -0
  4. outerbounds/_vendor/click/__init__.py +73 -0
  5. outerbounds/_vendor/click/_compat.py +626 -0
  6. outerbounds/_vendor/click/_termui_impl.py +717 -0
  7. outerbounds/_vendor/click/_textwrap.py +49 -0
  8. outerbounds/_vendor/click/_winconsole.py +279 -0
  9. outerbounds/_vendor/click/core.py +2998 -0
  10. outerbounds/_vendor/click/decorators.py +497 -0
  11. outerbounds/_vendor/click/exceptions.py +287 -0
  12. outerbounds/_vendor/click/formatting.py +301 -0
  13. outerbounds/_vendor/click/globals.py +68 -0
  14. outerbounds/_vendor/click/parser.py +529 -0
  15. outerbounds/_vendor/click/py.typed +0 -0
  16. outerbounds/_vendor/click/shell_completion.py +580 -0
  17. outerbounds/_vendor/click/termui.py +787 -0
  18. outerbounds/_vendor/click/testing.py +479 -0
  19. outerbounds/_vendor/click/types.py +1073 -0
  20. outerbounds/_vendor/click/utils.py +580 -0
  21. outerbounds/_vendor/click.LICENSE +28 -0
  22. outerbounds/_vendor/vendor_any.txt +2 -0
  23. outerbounds/_vendor/yaml/__init__.py +471 -0
  24. outerbounds/_vendor/yaml/_yaml.cpython-311-darwin.so +0 -0
  25. outerbounds/_vendor/yaml/composer.py +146 -0
  26. outerbounds/_vendor/yaml/constructor.py +862 -0
  27. outerbounds/_vendor/yaml/cyaml.py +177 -0
  28. outerbounds/_vendor/yaml/dumper.py +138 -0
  29. outerbounds/_vendor/yaml/emitter.py +1239 -0
  30. outerbounds/_vendor/yaml/error.py +94 -0
  31. outerbounds/_vendor/yaml/events.py +104 -0
  32. outerbounds/_vendor/yaml/loader.py +62 -0
  33. outerbounds/_vendor/yaml/nodes.py +51 -0
  34. outerbounds/_vendor/yaml/parser.py +629 -0
  35. outerbounds/_vendor/yaml/reader.py +208 -0
  36. outerbounds/_vendor/yaml/representer.py +378 -0
  37. outerbounds/_vendor/yaml/resolver.py +245 -0
  38. outerbounds/_vendor/yaml/scanner.py +1555 -0
  39. outerbounds/_vendor/yaml/serializer.py +127 -0
  40. outerbounds/_vendor/yaml/tokens.py +129 -0
  41. outerbounds/command_groups/apps_cli.py +450 -0
  42. outerbounds/command_groups/cli.py +9 -5
  43. outerbounds/command_groups/local_setup_cli.py +249 -33
  44. outerbounds/command_groups/perimeters_cli.py +231 -33
  45. outerbounds/command_groups/tutorials_cli.py +111 -0
  46. outerbounds/command_groups/workstations_cli.py +88 -15
  47. outerbounds/utils/kubeconfig.py +2 -2
  48. outerbounds/utils/metaflowconfig.py +111 -21
  49. outerbounds/utils/schema.py +8 -2
  50. outerbounds/utils/utils.py +19 -0
  51. outerbounds/vendor.py +159 -0
  52. {outerbounds-0.3.55rc3.dist-info → outerbounds-0.3.133.dist-info}/METADATA +17 -6
  53. outerbounds-0.3.133.dist-info/RECORD +59 -0
  54. {outerbounds-0.3.55rc3.dist-info → outerbounds-0.3.133.dist-info}/WHEEL +1 -1
  55. outerbounds-0.3.55rc3.dist-info/RECORD +0 -15
  56. {outerbounds-0.3.55rc3.dist-info → outerbounds-0.3.133.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,629 @@
1
+ # The following YAML grammar is LL(1) and is parsed by a recursive descent
2
+ # parser.
3
+ #
4
+ # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
5
+ # implicit_document ::= block_node DOCUMENT-END*
6
+ # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
7
+ # block_node_or_indentless_sequence ::=
8
+ # ALIAS
9
+ # | properties (block_content | indentless_block_sequence)?
10
+ # | block_content
11
+ # | indentless_block_sequence
12
+ # block_node ::= ALIAS
13
+ # | properties block_content?
14
+ # | block_content
15
+ # flow_node ::= ALIAS
16
+ # | properties flow_content?
17
+ # | flow_content
18
+ # properties ::= TAG ANCHOR? | ANCHOR TAG?
19
+ # block_content ::= block_collection | flow_collection | SCALAR
20
+ # flow_content ::= flow_collection | SCALAR
21
+ # block_collection ::= block_sequence | block_mapping
22
+ # flow_collection ::= flow_sequence | flow_mapping
23
+ # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
24
+ # indentless_sequence ::= (BLOCK-ENTRY block_node?)+
25
+ # block_mapping ::= BLOCK-MAPPING_START
26
+ # ((KEY block_node_or_indentless_sequence?)?
27
+ # (VALUE block_node_or_indentless_sequence?)?)*
28
+ # BLOCK-END
29
+ # flow_sequence ::= FLOW-SEQUENCE-START
30
+ # (flow_sequence_entry FLOW-ENTRY)*
31
+ # flow_sequence_entry?
32
+ # FLOW-SEQUENCE-END
33
+ # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
34
+ # flow_mapping ::= FLOW-MAPPING-START
35
+ # (flow_mapping_entry FLOW-ENTRY)*
36
+ # flow_mapping_entry?
37
+ # FLOW-MAPPING-END
38
+ # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
39
+ #
40
+ # FIRST sets:
41
+ #
42
+ # stream: { STREAM-START }
43
+ # explicit_document: { DIRECTIVE DOCUMENT-START }
44
+ # implicit_document: FIRST(block_node)
45
+ # block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
46
+ # flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
47
+ # block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
48
+ # flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
49
+ # block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
50
+ # flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
51
+ # block_sequence: { BLOCK-SEQUENCE-START }
52
+ # block_mapping: { BLOCK-MAPPING-START }
53
+ # block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
54
+ # indentless_sequence: { ENTRY }
55
+ # flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
56
+ # flow_sequence: { FLOW-SEQUENCE-START }
57
+ # flow_mapping: { FLOW-MAPPING-START }
58
+ # flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
59
+ # flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
60
+
61
# Public names exported by this module; everything pulled in by the
# star-imports below (tokens, events, scanner machinery) is an
# implementation detail, not part of the public surface.
__all__ = ["Parser", "ParserError"]

from .error import MarkedYAMLError
from .tokens import *
from .events import *
from .scanner import *
67
+
68
+
69
class ParserError(MarkedYAMLError):
    # Raised when the token stream cannot be parsed as a YAML event
    # stream.  Context/problem message and mark formatting are
    # inherited from MarkedYAMLError.
    pass
71
+
72
+
73
class Parser:
    """LL(1) recursive-descent parser: turns a token stream into events.

    Since writing a recursive-descendant parser is a straightforward task, we
    do not give many comments here.

    The parser is written as an explicit state machine rather than with
    actual recursion: ``self.state`` holds the next ``parse_*`` method to
    run, and ``self.states`` is the return stack of pending states.
    ``self.marks`` stacks the start marks of open collections so error
    messages can point at where a collection began.

    NOTE(review): ``check_token`` / ``peek_token`` / ``get_token`` are not
    defined here — they are expected to be provided by the Scanner class
    this Parser is mixed with (see the ``from .scanner import *`` above
    and the loader classes); confirm against loader.py.
    """

    # Fallback tag-handle resolutions used when a document declares no
    # %TAG directives (and to fill in handles a document leaves out).
    DEFAULT_TAGS = {
        "!": "!",
        "!!": "tag:yaml.org,2002:",
    }

    def __init__(self):
        # current_event buffers the next event between peek/check and get.
        self.current_event = None
        # Per-document directive state (reset by process_directives).
        self.yaml_version = None
        self.tag_handles = {}
        # State-machine stacks; the initial state expects STREAM-START.
        self.states = []
        self.marks = []
        self.state = self.parse_stream_start

    def dispose(self):
        # Reset the state attributes (to clear self-references)
        self.states = []
        self.state = None

    def check_event(self, *choices):
        # Check the type of the next event.
        # With no arguments: is there any next event at all?
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # Get the next event without consuming it.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # Get the next event and proceed further (consumes the event).
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        value = self.current_event
        self.current_event = None
        return value

    # stream    ::= STREAM-START implicit_document? explicit_document* STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):

        # Parse the stream start.
        token = self.get_token()
        event = StreamStartEvent(
            token.start_mark, token.end_mark, encoding=token.encoding
        )

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):

        # Parse an implicit document: content with no '---' marker and no
        # directives.  The DocumentStartEvent is zero-width (start == end).
        if not self.check_token(DirectiveToken, DocumentStartToken, StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark, explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):

        # Parse any extra document end indicators.
        while self.check_token(DocumentEndToken):
            self.get_token()

        # Parse an explicit document.
        if not self.check_token(StreamEndToken):
            token = self.peek_token()
            start_mark = token.start_mark
            version, tags = self.process_directives()
            if not self.check_token(DocumentStartToken):
                raise ParserError(
                    None,
                    None,
                    "expected '<document start>', but found %r" % self.peek_token().id,
                    self.peek_token().start_mark,
                )
            token = self.get_token()
            end_mark = token.end_mark
            event = DocumentStartEvent(
                start_mark, end_mark, explicit=True, version=version, tags=tags
            )
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark)
            # At stream end every collection and document must be closed.
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):

        # Parse the document end.  '...' makes the end explicit; otherwise
        # a zero-width implicit DocumentEndEvent is emitted.
        token = self.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.check_token(DocumentEndToken):
            token = self.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark, explicit=explicit)

        # Prepare the next state.
        self.state = self.parse_document_start

        return event

    def parse_document_content(self):
        # An "empty" document (next token already closes it) produces an
        # empty scalar; otherwise parse a regular block node.
        if self.check_token(
            DirectiveToken, DocumentStartToken, DocumentEndToken, StreamEndToken
        ):
            event = self.process_empty_scalar(self.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        # Consume %YAML and %TAG directives preceding a document and
        # return (version, tags) for the DocumentStartEvent.  Resets the
        # per-document directive state first.
        self.yaml_version = None
        self.tag_handles = {}
        while self.check_token(DirectiveToken):
            token = self.get_token()
            if token.name == "YAML":
                if self.yaml_version is not None:
                    raise ParserError(
                        None, None, "found duplicate YAML directive", token.start_mark
                    )
                major, minor = token.value
                if major != 1:
                    raise ParserError(
                        None,
                        None,
                        "found incompatible YAML document (version 1.* is required)",
                        token.start_mark,
                    )
                self.yaml_version = token.value
            elif token.name == "TAG":
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(
                        None, None, "duplicate tag handle %r" % handle, token.start_mark
                    )
                self.tag_handles[handle] = prefix
        # The returned tags dict is a copy taken *before* the defaults are
        # merged in, so the event reports only explicitly declared handles.
        if self.tag_handles:
            value = self.yaml_version, self.tag_handles.copy()
        else:
            value = self.yaml_version, None
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value

    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        return self.parse_node(block=True)

    def parse_flow_node(self):
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        return self.parse_node(block=True, indentless_sequence=True)

    def parse_node(self, block=False, indentless_sequence=False):
        # Parse a single node: alias, scalar, or the start of a
        # collection.  ``block`` permits block-style collections;
        # ``indentless_sequence`` additionally permits a '-' entry with no
        # BLOCK-SEQUENCE-START (used for sequences nested in mappings).
        if self.check_token(AliasToken):
            token = self.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)
            self.state = self.states.pop()
        else:
            # Collect optional node properties: anchor/tag in either order.
            anchor = None
            tag = None
            start_mark = end_mark = tag_mark = None
            if self.check_token(AnchorToken):
                token = self.get_token()
                start_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
                if self.check_token(TagToken):
                    token = self.get_token()
                    tag_mark = token.start_mark
                    end_mark = token.end_mark
                    tag = token.value
            elif self.check_token(TagToken):
                token = self.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
                if self.check_token(AnchorToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    anchor = token.value
            if tag is not None:
                # Resolve a (handle, suffix) tag pair against the declared
                # tag handles; a None handle means the tag was verbatim.
                handle, suffix = tag
                if handle is not None:
                    if handle not in self.tag_handles:
                        raise ParserError(
                            "while parsing a node",
                            start_mark,
                            "found undefined tag handle %r" % handle,
                            tag_mark,
                        )
                    tag = self.tag_handles[handle] + suffix
                else:
                    tag = suffix
            # if tag == '!':
            #    raise ParserError("while parsing a node", start_mark,
            #            "found non-specific tag '!'", tag_mark,
            #            "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
            if start_mark is None:
                start_mark = end_mark = self.peek_token().start_mark
            event = None
            # A node with no tag (or the non-specific '!') may be resolved
            # implicitly by the resolver.
            implicit = tag is None or tag == "!"
            if indentless_sequence and self.check_token(BlockEntryToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit, start_mark, end_mark)
                self.state = self.parse_indentless_sequence_entry
            else:
                if self.check_token(ScalarToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    # implicit is a pair: (resolvable as plain, resolvable
                    # as quoted) — consumed by the resolver/emitter.
                    if (token.plain and tag is None) or tag == "!":
                        implicit = (True, False)
                    elif tag is None:
                        implicit = (False, True)
                    else:
                        implicit = (False, False)
                    event = ScalarEvent(
                        anchor,
                        tag,
                        implicit,
                        token.value,
                        start_mark,
                        end_mark,
                        style=token.style,
                    )
                    self.state = self.states.pop()
                elif self.check_token(FlowSequenceStartToken):
                    end_mark = self.peek_token().end_mark
                    event = SequenceStartEvent(
                        anchor, tag, implicit, start_mark, end_mark, flow_style=True
                    )
                    self.state = self.parse_flow_sequence_first_entry
                elif self.check_token(FlowMappingStartToken):
                    end_mark = self.peek_token().end_mark
                    event = MappingStartEvent(
                        anchor, tag, implicit, start_mark, end_mark, flow_style=True
                    )
                    self.state = self.parse_flow_mapping_first_key
                elif block and self.check_token(BlockSequenceStartToken):
                    end_mark = self.peek_token().start_mark
                    event = SequenceStartEvent(
                        anchor, tag, implicit, start_mark, end_mark, flow_style=False
                    )
                    self.state = self.parse_block_sequence_first_entry
                elif block and self.check_token(BlockMappingStartToken):
                    end_mark = self.peek_token().start_mark
                    event = MappingStartEvent(
                        anchor, tag, implicit, start_mark, end_mark, flow_style=False
                    )
                    self.state = self.parse_block_mapping_first_key
                elif anchor is not None or tag is not None:
                    # Empty scalars are allowed even if a tag or an anchor is
                    # specified.
                    event = ScalarEvent(
                        anchor, tag, (implicit, False), "", start_mark, end_mark
                    )
                    self.state = self.states.pop()
                else:
                    if block:
                        node = "block"
                    else:
                        node = "flow"
                    token = self.peek_token()
                    raise ParserError(
                        "while parsing a %s node" % node,
                        start_mark,
                        "expected the node content, but found %r" % token.id,
                        token.start_mark,
                    )
        return event

    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

    def parse_block_sequence_first_entry(self):
        # Consume BLOCK-SEQUENCE-START and remember where the sequence
        # began for error reporting.
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                # '-' followed directly by another entry/end: empty item.
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError(
                "while parsing a block collection",
                self.marks[-1],
                "expected <block end>, but found %r" % token.id,
                token.start_mark,
            )
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    def parse_indentless_sequence_entry(self):
        # Like parse_block_sequence_entry, but there is no BLOCK-END: the
        # sequence ends at the first token that is not a '-' entry, and the
        # SequenceEndEvent is zero-width at that token's start.
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(
                BlockEntryToken, KeyToken, ValueToken, BlockEndToken
            ):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        token = self.peek_token()
        event = SequenceEndEvent(token.start_mark, token.start_mark)
        self.state = self.states.pop()
        return event

    # block_mapping     ::= BLOCK-MAPPING_START
    #                       ((KEY block_node_or_indentless_sequence?)?
    #                       (VALUE block_node_or_indentless_sequence?)?)*
    #                       BLOCK-END

    def parse_block_mapping_first_key(self):
        # Consume BLOCK-MAPPING-START and remember where the mapping began.
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        if self.check_token(KeyToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                # '?' with no key node: the key is an empty scalar.
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError(
                "while parsing a block mapping",
                self.marks[-1],
                "expected <block end>, but found %r" % token.id,
                token.start_mark,
            )
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                # ':' with no value node: the value is an empty scalar.
                self.state = self.parse_block_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            # Key with no ':' at all — value is an empty scalar too.
            self.state = self.parse_block_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    # flow_sequence     ::= FLOW-SEQUENCE-START
    #                       (flow_sequence_entry FLOW-ENTRY)*
    #                       flow_sequence_entry?
    #                       FLOW-SEQUENCE-END
    # flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while production rules for both flow_sequence_entry and
    # flow_mapping_entry are equal, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generate an inline mapping (set syntax).

    def parse_flow_sequence_first_entry(self):
        # Consume FLOW-SEQUENCE-START ('[') and remember its position.
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        if not self.check_token(FlowSequenceEndToken):
            if not first:
                # Every entry after the first must be preceded by ','.
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError(
                        "while parsing a flow sequence",
                        self.marks[-1],
                        "expected ',' or ']', but got %r" % token.id,
                        token.start_mark,
                    )

            if self.check_token(KeyToken):
                # 'KEY' inside a flow sequence opens a zero-width inline
                # single-pair mapping (e.g. `[a: b]`).
                token = self.peek_token()
                event = MappingStartEvent(
                    None, None, True, token.start_mark, token.end_mark, flow_style=True
                )
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        # Consume the KEY token of an inline mapping entry.
        token = self.get_token()
        if not self.check_token(ValueToken, FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        # Close the zero-width inline mapping without consuming a token.
        self.state = self.parse_flow_sequence_entry
        token = self.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)

    # flow_mapping  ::= FLOW-MAPPING-START
    #                   (flow_mapping_entry FLOW-ENTRY)*
    #                   flow_mapping_entry?
    #                   FLOW-MAPPING-END
    # flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        # Consume FLOW-MAPPING-START ('{') and remember its position.
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        if not self.check_token(FlowMappingEndToken):
            if not first:
                # Every entry after the first must be preceded by ','.
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError(
                        "while parsing a flow mapping",
                        self.marks[-1],
                        "expected ',' or '}', but got %r" % token.id,
                        token.start_mark,
                    )
            if self.check_token(KeyToken):
                token = self.get_token()
                if not self.check_token(
                    ValueToken, FlowEntryToken, FlowMappingEndToken
                ):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif not self.check_token(FlowMappingEndToken):
                # Bare node used as a key (`{a}`): its value is empty.
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        # Key with no ':' in a flow mapping — synthesize an empty value.
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.peek_token().start_mark)

    def process_empty_scalar(self, mark):
        # A zero-width plain scalar: implicit as plain, not as quoted.
        return ScalarEvent(None, None, (True, False), "", mark, mark)