metaflow 2.16.5__py2.py3-none-any.whl → 2.16.7__py2.py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (47)
  1. metaflow/_vendor/click/core.py +3 -4
  2. metaflow/_vendor/imghdr/__init__.py +11 -0
  3. metaflow/_vendor/yaml/__init__.py +427 -0
  4. metaflow/_vendor/yaml/composer.py +139 -0
  5. metaflow/_vendor/yaml/constructor.py +748 -0
  6. metaflow/_vendor/yaml/cyaml.py +101 -0
  7. metaflow/_vendor/yaml/dumper.py +62 -0
  8. metaflow/_vendor/yaml/emitter.py +1137 -0
  9. metaflow/_vendor/yaml/error.py +75 -0
  10. metaflow/_vendor/yaml/events.py +86 -0
  11. metaflow/_vendor/yaml/loader.py +63 -0
  12. metaflow/_vendor/yaml/nodes.py +49 -0
  13. metaflow/_vendor/yaml/parser.py +589 -0
  14. metaflow/_vendor/yaml/reader.py +185 -0
  15. metaflow/_vendor/yaml/representer.py +389 -0
  16. metaflow/_vendor/yaml/resolver.py +227 -0
  17. metaflow/_vendor/yaml/scanner.py +1435 -0
  18. metaflow/_vendor/yaml/serializer.py +111 -0
  19. metaflow/_vendor/yaml/tokens.py +104 -0
  20. metaflow/cli.py +11 -2
  21. metaflow/client/core.py +6 -1
  22. metaflow/extension_support/__init__.py +4 -3
  23. metaflow/metaflow_environment.py +14 -6
  24. metaflow/package/__init__.py +18 -9
  25. metaflow/packaging_sys/__init__.py +53 -43
  26. metaflow/packaging_sys/backend.py +21 -6
  27. metaflow/packaging_sys/tar_backend.py +16 -3
  28. metaflow/packaging_sys/v1.py +21 -21
  29. metaflow/plugins/argo/argo_workflows_deployer_objects.py +37 -0
  30. metaflow/plugins/aws/step_functions/step_functions_deployer_objects.py +16 -0
  31. metaflow/plugins/cards/card_modules/convert_to_native_type.py +7 -1
  32. metaflow/plugins/pypi/conda_decorator.py +4 -2
  33. metaflow/runner/click_api.py +14 -7
  34. metaflow/runner/deployer.py +80 -1
  35. metaflow/runner/subprocess_manager.py +20 -12
  36. metaflow/user_decorators/mutable_flow.py +3 -1
  37. metaflow/vendor.py +23 -6
  38. metaflow/version.py +1 -1
  39. {metaflow-2.16.5.dist-info → metaflow-2.16.7.dist-info}/METADATA +2 -2
  40. {metaflow-2.16.5.dist-info → metaflow-2.16.7.dist-info}/RECORD +47 -30
  41. {metaflow-2.16.5.data → metaflow-2.16.7.data}/data/share/metaflow/devtools/Makefile +0 -0
  42. {metaflow-2.16.5.data → metaflow-2.16.7.data}/data/share/metaflow/devtools/Tiltfile +0 -0
  43. {metaflow-2.16.5.data → metaflow-2.16.7.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  44. {metaflow-2.16.5.dist-info → metaflow-2.16.7.dist-info}/WHEEL +0 -0
  45. {metaflow-2.16.5.dist-info → metaflow-2.16.7.dist-info}/entry_points.txt +0 -0
  46. {metaflow-2.16.5.dist-info → metaflow-2.16.7.dist-info}/licenses/LICENSE +0 -0
  47. {metaflow-2.16.5.dist-info → metaflow-2.16.7.dist-info}/top_level.txt +0 -0
metaflow/_vendor/click/core.py
@@ -719,7 +719,7 @@ class BaseCommand(object):
         prog_name=None,
         complete_var=None,
         standalone_mode=True,
-        **extra,
+        **extra
     ):
         """This is the way to invoke a script with all the bells and
         whistles as a command line application. This will always terminate
@@ -1101,7 +1101,7 @@ class MultiCommand(Command):
         subcommand_metavar=None,
         chain=False,
         result_callback=None,
-        **attrs,
+        **attrs
     ):
         Command.__init__(self, name, **attrs)
         if no_args_is_help is None:
@@ -1463,7 +1463,6 @@ class Parameter(object):
     parameter. The old callback format will still work, but it will
     raise a warning to give you a chance to migrate the code easier.
     """
-
    param_type_name = "parameter"
 
    def __init__(
@@ -1709,7 +1708,7 @@ class Option(Parameter):
         hidden=False,
         show_choices=True,
         show_envvar=False,
-        **attrs,
+        **attrs
     ):
         default_is_missing = attrs.get("default", _missing) is _missing
         Parameter.__init__(self, param_decls, type=type, **attrs)
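
The only code change in the vendored click/core.py is dropping the trailing comma after **extra / **attrs in several signatures (plus one removed blank line). A plausible motivation, given the py2.py3 universal wheel tag, is source compatibility with older interpreters: Python 2 rejects a trailing comma after **kwargs at compile time. A hypothetical check, not part of the diff:

    # Not from the diff: older Pythons (including Python 2) reject a trailing
    # comma after **kwargs in a def, while current Python 3 accepts it.
    for src in ("def main(prog_name=None, **extra,):\n    pass\n",
                "def main(prog_name=None, **extra):\n    pass\n"):
        try:
            compile(src, "<test>", "exec")
            print("accepted: " + src.splitlines()[0])
        except SyntaxError:
            print("rejected: " + src.splitlines()[0])
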
metaflow/_vendor/imghdr/__init__.py
@@ -1,9 +1,20 @@
 """Recognize image file formats based on their first few bytes."""
 
 from os import PathLike
+import warnings
 
 __all__ = ["what"]
 
+
+# python-deadlib: Replace deprecation warning not to raise exception
+warnings.warn(
+    f"{__name__} was removed in Python 3.13. "
+    f"Please be aware that you are currently NOT using standard '{__name__}', "
+    f"but instead a separately installed 'standard-{__name__}'.",
+    DeprecationWarning, stacklevel=2
+)
+
+
 #-------------------------#
 # Recognize image headers #
 #-------------------------#
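
The backported imghdr emits its DeprecationWarning at import time, so callers who want to silence it must install a filter before the first import. A minimal sketch, assuming the module is importable under the usual vendored path:

    # Sketch only; the import path metaflow._vendor.imghdr is assumed.
    # The warning fires on first import, so filter before importing.
    import warnings

    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        from metaflow._vendor import imghdr

    # imghdr.what() also accepts raw header bytes via h=...
    print(imghdr.what(None, h=b"\x89PNG\r\n\x1a\n"))  # -> png
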
metaflow/_vendor/yaml/__init__.py
@@ -0,0 +1,427 @@
+
+from .error import *
+
+from .tokens import *
+from .events import *
+from .nodes import *
+
+from .loader import *
+from .dumper import *
+
+__version__ = '5.3.1'
+try:
+    from .cyaml import *
+    __with_libyaml__ = True
+except ImportError:
+    __with_libyaml__ = False
+
+import io
+
+#------------------------------------------------------------------------------
+# Warnings control
+#------------------------------------------------------------------------------
+
+# 'Global' warnings state:
+_warnings_enabled = {
+    'YAMLLoadWarning': True,
+}
+
+# Get or set global warnings' state
+def warnings(settings=None):
+    if settings is None:
+        return _warnings_enabled
+
+    if type(settings) is dict:
+        for key in settings:
+            if key in _warnings_enabled:
+                _warnings_enabled[key] = settings[key]
+
+# Warn when load() is called without Loader=...
+class YAMLLoadWarning(RuntimeWarning):
+    pass
+
+def load_warning(method):
+    if _warnings_enabled['YAMLLoadWarning'] is False:
+        return
+
+    import warnings
+
+    message = (
+        "calling yaml.%s() without Loader=... is deprecated, as the "
+        "default Loader is unsafe. Please read "
+        "https://msg.pyyaml.org/load for full details."
+    ) % method
+
+    warnings.warn(message, YAMLLoadWarning, stacklevel=3)
+
+#------------------------------------------------------------------------------
+def scan(stream, Loader=Loader):
+    """
+    Scan a YAML stream and produce scanning tokens.
+    """
+    loader = Loader(stream)
+    try:
+        while loader.check_token():
+            yield loader.get_token()
+    finally:
+        loader.dispose()
+
+def parse(stream, Loader=Loader):
+    """
+    Parse a YAML stream and produce parsing events.
+    """
+    loader = Loader(stream)
+    try:
+        while loader.check_event():
+            yield loader.get_event()
+    finally:
+        loader.dispose()
+
+def compose(stream, Loader=Loader):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding representation tree.
+    """
+    loader = Loader(stream)
+    try:
+        return loader.get_single_node()
+    finally:
+        loader.dispose()
+
+def compose_all(stream, Loader=Loader):
+    """
+    Parse all YAML documents in a stream
+    and produce corresponding representation trees.
+    """
+    loader = Loader(stream)
+    try:
+        while loader.check_node():
+            yield loader.get_node()
+    finally:
+        loader.dispose()
+
+def load(stream, Loader=None):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding Python object.
+    """
+    if Loader is None:
+        load_warning('load')
+        Loader = FullLoader
+
+    loader = Loader(stream)
+    try:
+        return loader.get_single_data()
+    finally:
+        loader.dispose()
+
+def load_all(stream, Loader=None):
+    """
+    Parse all YAML documents in a stream
+    and produce corresponding Python objects.
+    """
+    if Loader is None:
+        load_warning('load_all')
+        Loader = FullLoader
+
+    loader = Loader(stream)
+    try:
+        while loader.check_data():
+            yield loader.get_data()
+    finally:
+        loader.dispose()
+
+def full_load(stream):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding Python object.
+
+    Resolve all tags except those known to be
+    unsafe on untrusted input.
+    """
+    return load(stream, FullLoader)
+
+def full_load_all(stream):
+    """
+    Parse all YAML documents in a stream
+    and produce corresponding Python objects.
+
+    Resolve all tags except those known to be
+    unsafe on untrusted input.
+    """
+    return load_all(stream, FullLoader)
+
+def safe_load(stream):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding Python object.
+
+    Resolve only basic YAML tags. This is known
+    to be safe for untrusted input.
+    """
+    return load(stream, SafeLoader)
+
+def safe_load_all(stream):
+    """
+    Parse all YAML documents in a stream
+    and produce corresponding Python objects.
+
+    Resolve only basic YAML tags. This is known
+    to be safe for untrusted input.
+    """
+    return load_all(stream, SafeLoader)
+
+def unsafe_load(stream):
+    """
+    Parse the first YAML document in a stream
+    and produce the corresponding Python object.
+
+    Resolve all tags, even those known to be
+    unsafe on untrusted input.
+    """
+    return load(stream, UnsafeLoader)
+
+def unsafe_load_all(stream):
+    """
+    Parse all YAML documents in a stream
+    and produce corresponding Python objects.
+
+    Resolve all tags, even those known to be
+    unsafe on untrusted input.
+    """
+    return load_all(stream, UnsafeLoader)
+
+def emit(events, stream=None, Dumper=Dumper,
+        canonical=None, indent=None, width=None,
+        allow_unicode=None, line_break=None):
+    """
+    Emit YAML parsing events into a stream.
+    If stream is None, return the produced string instead.
+    """
+    getvalue = None
+    if stream is None:
+        stream = io.StringIO()
+        getvalue = stream.getvalue
+    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+            allow_unicode=allow_unicode, line_break=line_break)
+    try:
+        for event in events:
+            dumper.emit(event)
+    finally:
+        dumper.dispose()
+    if getvalue:
+        return getvalue()
+
+def serialize_all(nodes, stream=None, Dumper=Dumper,
+        canonical=None, indent=None, width=None,
+        allow_unicode=None, line_break=None,
+        encoding=None, explicit_start=None, explicit_end=None,
+        version=None, tags=None):
+    """
+    Serialize a sequence of representation trees into a YAML stream.
+    If stream is None, return the produced string instead.
+    """
+    getvalue = None
+    if stream is None:
+        if encoding is None:
+            stream = io.StringIO()
+        else:
+            stream = io.BytesIO()
+        getvalue = stream.getvalue
+    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+            allow_unicode=allow_unicode, line_break=line_break,
+            encoding=encoding, version=version, tags=tags,
+            explicit_start=explicit_start, explicit_end=explicit_end)
+    try:
+        dumper.open()
+        for node in nodes:
+            dumper.serialize(node)
+        dumper.close()
+    finally:
+        dumper.dispose()
+    if getvalue:
+        return getvalue()
+
+def serialize(node, stream=None, Dumper=Dumper, **kwds):
+    """
+    Serialize a representation tree into a YAML stream.
+    If stream is None, return the produced string instead.
+    """
+    return serialize_all([node], stream, Dumper=Dumper, **kwds)
+
+def dump_all(documents, stream=None, Dumper=Dumper,
+        default_style=None, default_flow_style=False,
+        canonical=None, indent=None, width=None,
+        allow_unicode=None, line_break=None,
+        encoding=None, explicit_start=None, explicit_end=None,
+        version=None, tags=None, sort_keys=True):
+    """
+    Serialize a sequence of Python objects into a YAML stream.
+    If stream is None, return the produced string instead.
+    """
+    getvalue = None
+    if stream is None:
+        if encoding is None:
+            stream = io.StringIO()
+        else:
+            stream = io.BytesIO()
+        getvalue = stream.getvalue
+    dumper = Dumper(stream, default_style=default_style,
+            default_flow_style=default_flow_style,
+            canonical=canonical, indent=indent, width=width,
+            allow_unicode=allow_unicode, line_break=line_break,
+            encoding=encoding, version=version, tags=tags,
+            explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys)
+    try:
+        dumper.open()
+        for data in documents:
+            dumper.represent(data)
+        dumper.close()
+    finally:
+        dumper.dispose()
+    if getvalue:
+        return getvalue()
+
+def dump(data, stream=None, Dumper=Dumper, **kwds):
+    """
+    Serialize a Python object into a YAML stream.
+    If stream is None, return the produced string instead.
+    """
+    return dump_all([data], stream, Dumper=Dumper, **kwds)
+
+def safe_dump_all(documents, stream=None, **kwds):
+    """
+    Serialize a sequence of Python objects into a YAML stream.
+    Produce only basic YAML tags.
+    If stream is None, return the produced string instead.
+    """
+    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
+
+def safe_dump(data, stream=None, **kwds):
+    """
+    Serialize a Python object into a YAML stream.
+    Produce only basic YAML tags.
+    If stream is None, return the produced string instead.
+    """
+    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
+
+def add_implicit_resolver(tag, regexp, first=None,
+        Loader=None, Dumper=Dumper):
+    """
+    Add an implicit scalar detector.
+    If an implicit scalar value matches the given regexp,
+    the corresponding tag is assigned to the scalar.
+    first is a sequence of possible initial characters or None.
+    """
+    if Loader is None:
+        loader.Loader.add_implicit_resolver(tag, regexp, first)
+        loader.FullLoader.add_implicit_resolver(tag, regexp, first)
+        loader.UnsafeLoader.add_implicit_resolver(tag, regexp, first)
+    else:
+        Loader.add_implicit_resolver(tag, regexp, first)
+    Dumper.add_implicit_resolver(tag, regexp, first)
+
+def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper):
+    """
+    Add a path based resolver for the given tag.
+    A path is a list of keys that forms a path
+    to a node in the representation tree.
+    Keys can be string values, integers, or None.
+    """
+    if Loader is None:
+        loader.Loader.add_path_resolver(tag, path, kind)
+        loader.FullLoader.add_path_resolver(tag, path, kind)
+        loader.UnsafeLoader.add_path_resolver(tag, path, kind)
+    else:
+        Loader.add_path_resolver(tag, path, kind)
+    Dumper.add_path_resolver(tag, path, kind)
+
+def add_constructor(tag, constructor, Loader=None):
+    """
+    Add a constructor for the given tag.
+    Constructor is a function that accepts a Loader instance
+    and a node object and produces the corresponding Python object.
+    """
+    if Loader is None:
+        loader.Loader.add_constructor(tag, constructor)
+        loader.FullLoader.add_constructor(tag, constructor)
+        loader.UnsafeLoader.add_constructor(tag, constructor)
+    else:
+        Loader.add_constructor(tag, constructor)
+
+def add_multi_constructor(tag_prefix, multi_constructor, Loader=None):
+    """
+    Add a multi-constructor for the given tag prefix.
+    Multi-constructor is called for a node if its tag starts with tag_prefix.
+    Multi-constructor accepts a Loader instance, a tag suffix,
+    and a node object and produces the corresponding Python object.
+    """
+    if Loader is None:
+        loader.Loader.add_multi_constructor(tag_prefix, multi_constructor)
+        loader.FullLoader.add_multi_constructor(tag_prefix, multi_constructor)
+        loader.UnsafeLoader.add_multi_constructor(tag_prefix, multi_constructor)
+    else:
+        Loader.add_multi_constructor(tag_prefix, multi_constructor)
+
+def add_representer(data_type, representer, Dumper=Dumper):
+    """
+    Add a representer for the given type.
+    Representer is a function accepting a Dumper instance
+    and an instance of the given data type
+    and producing the corresponding representation node.
+    """
+    Dumper.add_representer(data_type, representer)
+
+def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
+    """
+    Add a representer for the given type.
+    Multi-representer is a function accepting a Dumper instance
+    and an instance of the given data type or subtype
+    and producing the corresponding representation node.
+    """
+    Dumper.add_multi_representer(data_type, multi_representer)
+
+class YAMLObjectMetaclass(type):
+    """
+    The metaclass for YAMLObject.
+    """
+    def __init__(cls, name, bases, kwds):
+        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
+            if isinstance(cls.yaml_loader, list):
+                for loader in cls.yaml_loader:
+                    loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+            else:
+                cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+
+            cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+
+class YAMLObject(metaclass=YAMLObjectMetaclass):
+    """
+    An object that can dump itself to a YAML stream
+    and load itself from a YAML stream.
+    """
+
+    __slots__ = ()  # no direct instantiation, so allow immutable subclasses
+
+    yaml_loader = [Loader, FullLoader, UnsafeLoader]
+    yaml_dumper = Dumper
+
+    yaml_tag = None
+    yaml_flow_style = None
+
+    @classmethod
+    def from_yaml(cls, loader, node):
+        """
+        Convert a representation node to a Python object.
+        """
+        return loader.construct_yaml_object(node, cls)
+
+    @classmethod
+    def to_yaml(cls, dumper, data):
+        """
+        Convert a Python object to a representation node.
+        """
+        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
+                flow_style=cls.yaml_flow_style)
+
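
The vendored copy is PyYAML 5.3.1 with the YAMLLoadWarning machinery intact: yaml.load() without an explicit Loader falls back to FullLoader and warns, while safe_load() resolves only basic tags. A short usage sketch (the metaflow._vendor import path is assumed):

    from metaflow._vendor import yaml

    doc = "name: demo\nretries: 3\n"

    data = yaml.safe_load(doc)   # basic tags only; safe for untrusted input
    assert data == {"name": "demo", "retries": 3}

    print(yaml.dump(data))       # round-trips back to YAML text
    # name: demo
    # retries: 3
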
metaflow/_vendor/yaml/composer.py
@@ -0,0 +1,139 @@
+
+__all__ = ['Composer', 'ComposerError']
+
+from .error import MarkedYAMLError
+from .events import *
+from .nodes import *
+
+class ComposerError(MarkedYAMLError):
+    pass
+
+class Composer:
+
+    def __init__(self):
+        self.anchors = {}
+
+    def check_node(self):
+        # Drop the STREAM-START event.
+        if self.check_event(StreamStartEvent):
+            self.get_event()
+
+        # If there are more documents available?
+        return not self.check_event(StreamEndEvent)
+
+    def get_node(self):
+        # Get the root node of the next document.
+        if not self.check_event(StreamEndEvent):
+            return self.compose_document()
+
+    def get_single_node(self):
+        # Drop the STREAM-START event.
+        self.get_event()
+
+        # Compose a document if the stream is not empty.
+        document = None
+        if not self.check_event(StreamEndEvent):
+            document = self.compose_document()
+
+        # Ensure that the stream contains no more documents.
+        if not self.check_event(StreamEndEvent):
+            event = self.get_event()
+            raise ComposerError("expected a single document in the stream",
+                    document.start_mark, "but found another document",
+                    event.start_mark)
+
+        # Drop the STREAM-END event.
+        self.get_event()
+
+        return document
+
+    def compose_document(self):
+        # Drop the DOCUMENT-START event.
+        self.get_event()
+
+        # Compose the root node.
+        node = self.compose_node(None, None)
+
+        # Drop the DOCUMENT-END event.
+        self.get_event()
+
+        self.anchors = {}
+        return node
+
+    def compose_node(self, parent, index):
+        if self.check_event(AliasEvent):
+            event = self.get_event()
+            anchor = event.anchor
+            if anchor not in self.anchors:
+                raise ComposerError(None, None, "found undefined alias %r"
+                        % anchor, event.start_mark)
+            return self.anchors[anchor]
+        event = self.peek_event()
+        anchor = event.anchor
+        if anchor is not None:
+            if anchor in self.anchors:
+                raise ComposerError("found duplicate anchor %r; first occurrence"
+                        % anchor, self.anchors[anchor].start_mark,
+                        "second occurrence", event.start_mark)
+        self.descend_resolver(parent, index)
+        if self.check_event(ScalarEvent):
+            node = self.compose_scalar_node(anchor)
+        elif self.check_event(SequenceStartEvent):
+            node = self.compose_sequence_node(anchor)
+        elif self.check_event(MappingStartEvent):
+            node = self.compose_mapping_node(anchor)
+        self.ascend_resolver()
+        return node
+
+    def compose_scalar_node(self, anchor):
+        event = self.get_event()
+        tag = event.tag
+        if tag is None or tag == '!':
+            tag = self.resolve(ScalarNode, event.value, event.implicit)
+        node = ScalarNode(tag, event.value,
+                event.start_mark, event.end_mark, style=event.style)
+        if anchor is not None:
+            self.anchors[anchor] = node
+        return node
+
+    def compose_sequence_node(self, anchor):
+        start_event = self.get_event()
+        tag = start_event.tag
+        if tag is None or tag == '!':
+            tag = self.resolve(SequenceNode, None, start_event.implicit)
+        node = SequenceNode(tag, [],
+                start_event.start_mark, None,
+                flow_style=start_event.flow_style)
+        if anchor is not None:
+            self.anchors[anchor] = node
+        index = 0
+        while not self.check_event(SequenceEndEvent):
+            node.value.append(self.compose_node(node, index))
+            index += 1
+        end_event = self.get_event()
+        node.end_mark = end_event.end_mark
+        return node
+
+    def compose_mapping_node(self, anchor):
+        start_event = self.get_event()
+        tag = start_event.tag
+        if tag is None or tag == '!':
+            tag = self.resolve(MappingNode, None, start_event.implicit)
+        node = MappingNode(tag, [],
+                start_event.start_mark, None,
+                flow_style=start_event.flow_style)
+        if anchor is not None:
+            self.anchors[anchor] = node
+        while not self.check_event(MappingEndEvent):
+            #key_event = self.peek_event()
+            item_key = self.compose_node(node, None)
+            #if item_key in node.value:
+            #    raise ComposerError("while composing a mapping", start_event.start_mark,
+            #            "found duplicate key", key_event.start_mark)
+            item_value = self.compose_node(node, item_key)
+            #node.value[item_key] = item_value
+            node.value.append((item_key, item_value))
+        end_event = self.get_event()
+        node.end_mark = end_event.end_mark
+        return node
+
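
Composer is the stage between the parser's event stream and the constructor: it resolves anchors and aliases and builds the node tree that compose() and compose_all() in __init__.py return. A small sketch of what it produces (import path assumed as above):

    from metaflow._vendor import yaml

    node = yaml.compose("a: [1, 2]")
    print(type(node).__name__)   # MappingNode
    key, value = node.value[0]   # mapping nodes store (key, value) node pairs
    print(key.tag, key.value)    # tag:yaml.org,2002:str a
    print(value.tag)             # tag:yaml.org,2002:seq
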