outerbounds 0.3.71__py3-none-any.whl → 0.3.75__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. outerbounds/_vendor/PyYAML.LICENSE +20 -0
  2. outerbounds/_vendor/__init__.py +0 -0
  3. outerbounds/_vendor/_yaml/__init__.py +34 -0
  4. outerbounds/_vendor/click/__init__.py +73 -0
  5. outerbounds/_vendor/click/_compat.py +626 -0
  6. outerbounds/_vendor/click/_termui_impl.py +717 -0
  7. outerbounds/_vendor/click/_textwrap.py +49 -0
  8. outerbounds/_vendor/click/_winconsole.py +279 -0
  9. outerbounds/_vendor/click/core.py +2998 -0
  10. outerbounds/_vendor/click/decorators.py +497 -0
  11. outerbounds/_vendor/click/exceptions.py +287 -0
  12. outerbounds/_vendor/click/formatting.py +301 -0
  13. outerbounds/_vendor/click/globals.py +68 -0
  14. outerbounds/_vendor/click/parser.py +529 -0
  15. outerbounds/_vendor/click/py.typed +0 -0
  16. outerbounds/_vendor/click/shell_completion.py +580 -0
  17. outerbounds/_vendor/click/termui.py +787 -0
  18. outerbounds/_vendor/click/testing.py +479 -0
  19. outerbounds/_vendor/click/types.py +1073 -0
  20. outerbounds/_vendor/click/utils.py +580 -0
  21. outerbounds/_vendor/click.LICENSE +28 -0
  22. outerbounds/_vendor/vendor_any.txt +2 -0
  23. outerbounds/_vendor/yaml/__init__.py +471 -0
  24. outerbounds/_vendor/yaml/composer.py +146 -0
  25. outerbounds/_vendor/yaml/constructor.py +862 -0
  26. outerbounds/_vendor/yaml/cyaml.py +177 -0
  27. outerbounds/_vendor/yaml/dumper.py +138 -0
  28. outerbounds/_vendor/yaml/emitter.py +1239 -0
  29. outerbounds/_vendor/yaml/error.py +94 -0
  30. outerbounds/_vendor/yaml/events.py +104 -0
  31. outerbounds/_vendor/yaml/loader.py +62 -0
  32. outerbounds/_vendor/yaml/nodes.py +51 -0
  33. outerbounds/_vendor/yaml/parser.py +629 -0
  34. outerbounds/_vendor/yaml/reader.py +208 -0
  35. outerbounds/_vendor/yaml/representer.py +378 -0
  36. outerbounds/_vendor/yaml/resolver.py +245 -0
  37. outerbounds/_vendor/yaml/scanner.py +1555 -0
  38. outerbounds/_vendor/yaml/serializer.py +127 -0
  39. outerbounds/_vendor/yaml/tokens.py +129 -0
  40. outerbounds/command_groups/cli.py +1 -1
  41. outerbounds/command_groups/local_setup_cli.py +1 -2
  42. outerbounds/command_groups/perimeters_cli.py +1 -2
  43. outerbounds/command_groups/workstations_cli.py +2 -2
  44. outerbounds/utils/kubeconfig.py +2 -2
  45. outerbounds/utils/metaflowconfig.py +1 -1
  46. outerbounds/vendor.py +159 -0
  47. {outerbounds-0.3.71.dist-info → outerbounds-0.3.75.dist-info}/METADATA +1 -3
  48. outerbounds-0.3.75.dist-info/RECORD +55 -0
  49. outerbounds-0.3.71.dist-info/RECORD +0 -15
  50. {outerbounds-0.3.71.dist-info → outerbounds-0.3.75.dist-info}/WHEEL +0 -0
  51. {outerbounds-0.3.71.dist-info → outerbounds-0.3.75.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,471 @@
1
+ from .error import *
2
+
3
+ from .tokens import *
4
+ from .events import *
5
+ from .nodes import *
6
+
7
+ from .loader import *
8
+ from .dumper import *
9
+
10
+ __version__ = "6.0.1"
11
+ try:
12
+ from .cyaml import *
13
+
14
+ __with_libyaml__ = True
15
+ except ImportError:
16
+ __with_libyaml__ = False
17
+
18
+ import io
19
+
20
# ------------------------------------------------------------------------------
# XXX "Warnings control" is now deprecated. Leaving in the API function to not
# break code that uses it.
# ------------------------------------------------------------------------------
def warnings(settings=None):
    """Deprecated no-op retained only for backward API compatibility.

    Returns an empty dict when *settings* is None, otherwise None.
    """
    return {} if settings is None else None
29
# ------------------------------------------------------------------------------
def scan(stream, Loader=Loader):
    """Scan *stream* and yield the raw YAML scanning tokens."""
    scanner = Loader(stream)
    try:
        while scanner.check_token():
            yield scanner.get_token()
    finally:
        # Always release the loader, even if the consumer stops early.
        scanner.dispose()
40
+
41
+
42
def parse(stream, Loader=Loader):
    """Parse *stream* and yield YAML parsing events."""
    parser = Loader(stream)
    try:
        while parser.check_event():
            yield parser.get_event()
    finally:
        # Always release the loader, even if the consumer stops early.
        parser.dispose()
52
+
53
+
54
def compose(stream, Loader=Loader):
    """Parse the first YAML document in *stream* and return its
    representation node tree (or None for an empty stream)."""
    composer = Loader(stream)
    try:
        return composer.get_single_node()
    finally:
        composer.dispose()
64
+
65
+
66
def compose_all(stream, Loader=Loader):
    """Parse every YAML document in *stream*, yielding one
    representation node tree per document."""
    composer = Loader(stream)
    try:
        while composer.check_node():
            yield composer.get_node()
    finally:
        composer.dispose()
77
+
78
+
79
def load(stream, Loader):
    """Parse the first YAML document in *stream* into a Python object.

    *Loader* must be passed explicitly; use SafeLoader for untrusted input.
    """
    yaml_loader = Loader(stream)
    try:
        return yaml_loader.get_single_data()
    finally:
        yaml_loader.dispose()
89
+
90
+
91
def load_all(stream, Loader):
    """Parse every YAML document in *stream*, yielding one Python
    object per document.

    *Loader* must be passed explicitly; use SafeLoader for untrusted input.
    """
    yaml_loader = Loader(stream)
    try:
        while yaml_loader.check_data():
            yield yaml_loader.get_data()
    finally:
        yaml_loader.dispose()
102
+
103
+
104
def full_load(stream):
    """Load the first YAML document in *stream* with FullLoader.

    Resolves all tags except those known to be unsafe on untrusted input.
    """
    return load(stream, FullLoader)
113
+
114
+
115
def full_load_all(stream):
    """Load every YAML document in *stream* with FullLoader.

    Resolves all tags except those known to be unsafe on untrusted input.
    """
    return load_all(stream, FullLoader)
124
+
125
+
126
def safe_load(stream):
    """Load the first YAML document in *stream* with SafeLoader.

    Resolves only basic YAML tags; safe for untrusted input.
    """
    return load(stream, SafeLoader)
135
+
136
+
137
def safe_load_all(stream):
    """Load every YAML document in *stream* with SafeLoader.

    Resolves only basic YAML tags; safe for untrusted input.
    """
    return load_all(stream, SafeLoader)
146
+
147
+
148
def unsafe_load(stream):
    """Load the first YAML document in *stream* with UnsafeLoader.

    Resolves ALL tags, including those known to be unsafe on
    untrusted input — never call this on data you do not control.
    """
    return load(stream, UnsafeLoader)
157
+
158
+
159
def unsafe_load_all(stream):
    """Load every YAML document in *stream* with UnsafeLoader.

    Resolves ALL tags, including those known to be unsafe on
    untrusted input — never call this on data you do not control.
    """
    return load_all(stream, UnsafeLoader)
168
+
169
+
170
def emit(
    events,
    stream=None,
    Dumper=Dumper,
    canonical=None,
    indent=None,
    width=None,
    allow_unicode=None,
    line_break=None,
):
    """Emit a sequence of YAML parsing events into *stream*.

    When *stream* is None the events are written to an in-memory text
    buffer and the resulting string is returned instead.
    """
    getvalue = None
    if stream is None:
        stream = io.StringIO()
        getvalue = stream.getvalue
    emitter = Dumper(
        stream,
        canonical=canonical,
        indent=indent,
        width=width,
        allow_unicode=allow_unicode,
        line_break=line_break,
    )
    try:
        for event in events:
            emitter.emit(event)
    finally:
        emitter.dispose()
    if getvalue:
        return getvalue()
203
+
204
+
205
def serialize_all(
    nodes,
    stream=None,
    Dumper=Dumper,
    canonical=None,
    indent=None,
    width=None,
    allow_unicode=None,
    line_break=None,
    encoding=None,
    explicit_start=None,
    explicit_end=None,
    version=None,
    tags=None,
):
    """Serialize a sequence of representation trees into a YAML stream.

    When *stream* is None the output goes to an in-memory buffer (text
    when *encoding* is None, bytes otherwise) and the buffered value is
    returned instead.
    """
    getvalue = None
    if stream is None:
        # Encoding selects between a text and a bytes buffer.
        stream = io.StringIO() if encoding is None else io.BytesIO()
        getvalue = stream.getvalue
    serializer = Dumper(
        stream,
        canonical=canonical,
        indent=indent,
        width=width,
        allow_unicode=allow_unicode,
        line_break=line_break,
        encoding=encoding,
        version=version,
        tags=tags,
        explicit_start=explicit_start,
        explicit_end=explicit_end,
    )
    try:
        serializer.open()
        for node in nodes:
            serializer.serialize(node)
        serializer.close()
    finally:
        serializer.dispose()
    if getvalue:
        return getvalue()
253
+
254
+
255
def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """Serialize a single representation tree into a YAML stream.

    When *stream* is None the produced string/bytes value is returned.
    """
    return serialize_all([node], stream, Dumper=Dumper, **kwds)
261
+
262
+
263
def dump_all(
    documents,
    stream=None,
    Dumper=Dumper,
    default_style=None,
    default_flow_style=False,
    canonical=None,
    indent=None,
    width=None,
    allow_unicode=None,
    line_break=None,
    encoding=None,
    explicit_start=None,
    explicit_end=None,
    version=None,
    tags=None,
    sort_keys=True,
):
    """Serialize a sequence of Python objects into a YAML stream.

    When *stream* is None the output goes to an in-memory buffer (text
    when *encoding* is None, bytes otherwise) and the buffered value is
    returned instead.
    """
    getvalue = None
    if stream is None:
        # Encoding selects between a text and a bytes buffer.
        stream = io.StringIO() if encoding is None else io.BytesIO()
        getvalue = stream.getvalue
    renderer = Dumper(
        stream,
        default_style=default_style,
        default_flow_style=default_flow_style,
        canonical=canonical,
        indent=indent,
        width=width,
        allow_unicode=allow_unicode,
        line_break=line_break,
        encoding=encoding,
        version=version,
        tags=tags,
        explicit_start=explicit_start,
        explicit_end=explicit_end,
        sort_keys=sort_keys,
    )
    try:
        renderer.open()
        for data in documents:
            renderer.represent(data)
        renderer.close()
    finally:
        renderer.dispose()
    if getvalue:
        return getvalue()
317
+
318
+
319
def dump(data, stream=None, Dumper=Dumper, **kwds):
    """Serialize a single Python object into a YAML stream.

    When *stream* is None the produced string/bytes value is returned.
    """
    return dump_all([data], stream, Dumper=Dumper, **kwds)
325
+
326
+
327
def safe_dump_all(documents, stream=None, **kwds):
    """Serialize a sequence of Python objects into a YAML stream using
    SafeDumper (only basic YAML tags are produced).

    When *stream* is None the produced string/bytes value is returned.
    """
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
334
+
335
+
336
def safe_dump(data, stream=None, **kwds):
    """Serialize a single Python object into a YAML stream using
    SafeDumper (only basic YAML tags are produced).

    When *stream* is None the produced string/bytes value is returned.
    """
    return dump_all([data], stream, Dumper=SafeDumper, **kwds)
343
+
344
+
345
def add_implicit_resolver(tag, regexp, first=None, Loader=None, Dumper=Dumper):
    """Register an implicit scalar resolver.

    A plain scalar matching *regexp* is assigned *tag*; *first* is an
    optional sequence of possible initial characters (None matches any).
    With Loader=None the resolver is added to every stock loader class.
    """
    if Loader is None:
        for target in (loader.Loader, loader.FullLoader, loader.UnsafeLoader):
            target.add_implicit_resolver(tag, regexp, first)
    else:
        Loader.add_implicit_resolver(tag, regexp, first)
    # The dumper side is always registered so round-tripping stays symmetric.
    Dumper.add_implicit_resolver(tag, regexp, first)
359
+
360
+
361
def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper):
    """Register a path-based resolver for *tag*.

    *path* is a list of keys (strings, integers, or None) describing the
    location of a node within the representation tree. With Loader=None
    the resolver is added to every stock loader class.
    """
    if Loader is None:
        for target in (loader.Loader, loader.FullLoader, loader.UnsafeLoader):
            target.add_path_resolver(tag, path, kind)
    else:
        Loader.add_path_resolver(tag, path, kind)
    # The dumper side is always registered so round-tripping stays symmetric.
    Dumper.add_path_resolver(tag, path, kind)
375
+
376
+
377
def add_constructor(tag, constructor, Loader=None):
    """Register *constructor* for *tag*.

    *constructor* is a callable taking (loader, node) and returning the
    corresponding Python object. With Loader=None the constructor is
    added to every stock loader class.
    """
    if Loader is None:
        for target in (loader.Loader, loader.FullLoader, loader.UnsafeLoader):
            target.add_constructor(tag, constructor)
    else:
        Loader.add_constructor(tag, constructor)
389
+
390
+
391
def add_multi_constructor(tag_prefix, multi_constructor, Loader=None):
    """Register *multi_constructor* for every tag starting with *tag_prefix*.

    *multi_constructor* is a callable taking (loader, tag_suffix, node)
    and returning the corresponding Python object. With Loader=None it
    is added to every stock loader class.
    """
    if Loader is None:
        for target in (loader.Loader, loader.FullLoader, loader.UnsafeLoader):
            target.add_multi_constructor(tag_prefix, multi_constructor)
    else:
        Loader.add_multi_constructor(tag_prefix, multi_constructor)
404
+
405
+
406
def add_representer(data_type, representer, Dumper=Dumper):
    """Register *representer* for exactly *data_type*.

    *representer* is a callable taking (dumper, data) and returning the
    corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)
414
+
415
+
416
def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """Register *multi_representer* for *data_type* and its subtypes.

    *multi_representer* is a callable taking (dumper, data) and
    returning the corresponding representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)
424
+
425
+
426
class YAMLObjectMetaclass(type):
    """Metaclass for YAMLObject.

    When a class declares a non-None ``yaml_tag`` of its own, the class
    is wired into its loader(s) as a constructor and into its dumper as
    a representer automatically.
    """

    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        # Only register classes that set yaml_tag themselves (not inherited).
        if kwds.get("yaml_tag") is not None and "yaml_tag" in kwds:
            loaders = cls.yaml_loader
            if not isinstance(loaders, list):
                loaders = [loaders]
            for yaml_loader in loaders:
                yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            cls.yaml_dumper.add_representer(cls, cls.to_yaml)
441
+
442
+
443
class YAMLObject(metaclass=YAMLObjectMetaclass):
    """Base class for objects that can dump themselves to a YAML stream
    and load themselves back from one.

    Subclasses set ``yaml_tag`` (and optionally ``yaml_flow_style``);
    the metaclass then registers them with the configured loaders and
    dumper.
    """

    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    # Classes (or a single class) whose constructor tables receive subclasses.
    yaml_loader = [Loader, FullLoader, UnsafeLoader]
    yaml_dumper = Dumper

    # Overridden by concrete subclasses.
    yaml_tag = None
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """Construct an instance of *cls* from a representation node."""
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """Represent *data* as a node tagged with ``cls.yaml_tag``."""
        return dumper.represent_yaml_object(
            cls.yaml_tag, data, cls, flow_style=cls.yaml_flow_style
        )
@@ -0,0 +1,146 @@
1
+ __all__ = ["Composer", "ComposerError"]
2
+
3
+ from .error import MarkedYAMLError
4
+ from .events import *
5
+ from .nodes import *
6
+
7
+
8
class ComposerError(MarkedYAMLError):
    """Raised when the event stream cannot be composed into node trees."""
10
+
11
+
12
class Composer:
    """Mixin that builds representation node trees from parser events.

    The event interface (``check_event``/``peek_event``/``get_event``)
    and the resolver interface (``descend_resolver``/``ascend_resolver``/
    ``resolve``) are expected from sibling base classes of the Loader.
    """

    def __init__(self):
        # anchor name -> composed node; reset after every document.
        self.anchors = {}

    def check_node(self):
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()
        # Documents remain as long as STREAM-END has not been reached.
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        # Root node of the next document, or None at end of stream.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        # Drop the STREAM-START event.
        self.get_event()

        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()

        # The single-document API rejects a second document.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError(
                "expected a single document in the stream",
                document.start_mark,
                "but found another document",
                event.start_mark,
            )

        # Drop the STREAM-END event.
        self.get_event()

        return document

    def compose_document(self):
        # Drop the DOCUMENT-START event.
        self.get_event()

        # Compose the root node.
        node = self.compose_node(None, None)

        # Drop the DOCUMENT-END event.
        self.get_event()

        # Anchors do not carry over between documents.
        self.anchors = {}
        return node

    def compose_node(self, parent, index):
        # An alias refers back to a previously anchored node.
        if self.check_event(AliasEvent):
            event = self.get_event()
            anchor = event.anchor
            if anchor not in self.anchors:
                raise ComposerError(
                    None, None, "found undefined alias %r" % anchor, event.start_mark
                )
            return self.anchors[anchor]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None:
            if anchor in self.anchors:
                raise ComposerError(
                    "found duplicate anchor %r; first occurrence" % anchor,
                    self.anchors[anchor].start_mark,
                    "second occurrence",
                    event.start_mark,
                )
        # Resolver context tracks our position in the tree for tag resolution.
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        event = self.get_event()
        tag = event.tag
        # Resolve missing or non-specific ("!") tags from the scalar value.
        if tag is None or tag == "!":
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(
            tag, event.value, event.start_mark, event.end_mark, style=event.style
        )
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        start = self.get_event()
        tag = start.tag
        if tag is None or tag == "!":
            tag = self.resolve(SequenceNode, None, start.implicit)
        node = SequenceNode(
            tag, [], start.start_mark, None, flow_style=start.flow_style
        )
        # Anchor before composing children so self-references resolve.
        if anchor is not None:
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end = self.get_event()
        node.end_mark = end.end_mark
        return node

    def compose_mapping_node(self, anchor):
        start = self.get_event()
        tag = start.tag
        if tag is None or tag == "!":
            tag = self.resolve(MappingNode, None, start.implicit)
        node = MappingNode(
            tag, [], start.start_mark, None, flow_style=start.flow_style
        )
        # Anchor before composing children so self-references resolve.
        if anchor is not None:
            self.anchors[anchor] = node
        while not self.check_event(MappingEndEvent):
            # NOTE: duplicate keys are not rejected at this level; the
            # value is a list of (key, value) node pairs in input order.
            item_key = self.compose_node(node, None)
            item_value = self.compose_node(node, item_key)
            node.value.append((item_key, item_value))
        end = self.get_event()
        node.end_mark = end.end_mark
        return node