omextra 0.0.0.dev472__py3-none-any.whl → 0.0.0.dev485__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omextra/__about__.py +2 -0
- omextra/collections/__init__.py +0 -0
- omextra/collections/hamt/LICENSE +35 -0
- omextra/collections/hamt/__init__.py +0 -0
- omextra/collections/hamt/_hamt.c +3621 -0
- omextra/formats/goyaml/LICENSE +16 -0
- omextra/formats/goyaml/__init__.py +29 -0
- omextra/formats/goyaml/ast.py +2217 -0
- omextra/formats/goyaml/errors.py +49 -0
- omextra/formats/goyaml/parsing.py +2332 -0
- omextra/formats/goyaml/scanning.py +1888 -0
- omextra/formats/goyaml/tokens.py +998 -0
- omextra/text/abnf/LICENSE +16 -0
- omextra/text/abnf/__init__.py +79 -0
- omextra/text/abnf/base.py +313 -0
- omextra/text/abnf/core.py +141 -0
- omextra/text/abnf/errors.py +10 -0
- omextra/text/abnf/meta.py +583 -0
- omextra/text/abnf/parsers.py +343 -0
- omextra/text/abnf/utils.py +76 -0
- omextra/text/abnf/visitors.py +55 -0
- {omextra-0.0.0.dev472.dist-info → omextra-0.0.0.dev485.dist-info}/METADATA +2 -2
- {omextra-0.0.0.dev472.dist-info → omextra-0.0.0.dev485.dist-info}/RECORD +27 -7
- {omextra-0.0.0.dev472.dist-info → omextra-0.0.0.dev485.dist-info}/WHEEL +0 -0
- {omextra-0.0.0.dev472.dist-info → omextra-0.0.0.dev485.dist-info}/entry_points.txt +0 -0
- {omextra-0.0.0.dev472.dist-info → omextra-0.0.0.dev485.dist-info}/licenses/LICENSE +0 -0
- {omextra-0.0.0.dev472.dist-info → omextra-0.0.0.dev485.dist-info}/top_level.txt +0 -0
omextra/formats/goyaml/ast.py
@@ -0,0 +1,2217 @@
# ruff: noqa: UP006 UP007 UP043 UP045
# @omlish-lite
import abc
import dataclasses as dc
import enum
import io
import typing as ta
import unicodedata

from omlish.lite.abstract import Abstract
from omlish.lite.check import check
from omlish.lite.dataclasses import dataclass_field_required

from . import tokens
from .errors import EofYamlError
from .errors import YamlError
from .errors import YamlErrorOr
from .errors import yaml_error
from .tokens import YamlNumberType
from .tokens import YamlPosition
from .tokens import YamlToken
from .tokens import YamlTokenType


##


ERR_INVALID_TOKEN_TYPE = yaml_error('invalid token type')
ERR_INVALID_ANCHOR_NAME = yaml_error('invalid anchor name')
ERR_INVALID_ALIAS_NAME = yaml_error('invalid alias name')


class YamlNodeType(enum.Enum):
    # UNKNOWN type identifier for default
    UNKNOWN = enum.auto()
    # DOCUMENT type identifier for document node
    DOCUMENT = enum.auto()
    # NULL type identifier for null node
    NULL = enum.auto()
    # BOOL type identifier for boolean node
    BOOL = enum.auto()
    # INTEGER type identifier for integer node
    INTEGER = enum.auto()
    # FLOAT type identifier for float node
    FLOAT = enum.auto()
    # INFINITY type identifier for infinity node
    INFINITY = enum.auto()
    # NAN type identifier for nan node
    NAN = enum.auto()
    # STRING type identifier for string node
    STRING = enum.auto()
    # MERGE_KEY type identifier for merge key node
    MERGE_KEY = enum.auto()
    # LITERAL type identifier for literal node
    LITERAL = enum.auto()
    # MAPPING type identifier for mapping node
    MAPPING = enum.auto()
    # MAPPING_KEY type identifier for mapping key node
    MAPPING_KEY = enum.auto()
    # MAPPING_VALUE type identifier for mapping value node
    MAPPING_VALUE = enum.auto()
    # SEQUENCE type identifier for sequence node
    SEQUENCE = enum.auto()
    # SEQUENCE_ENTRY type identifier for sequence entry node
    SEQUENCE_ENTRY = enum.auto()
    # ANCHOR type identifier for anchor node
    ANCHOR = enum.auto()
    # ALIAS type identifier for alias node
    ALIAS = enum.auto()
    # DIRECTIVE type identifier for directive node
    DIRECTIVE = enum.auto()
    # TAG type identifier for tag node
    TAG = enum.auto()
    # COMMENT type identifier for comment node
    COMMENT = enum.auto()
    # COMMENT_GROUP type identifier for comment group node
    COMMENT_GROUP = enum.auto()


# String node type identifier to YAML Structure name based on https://yaml.org/spec/1.2/spec.html
YAML_NODE_TYPE_YAML_NAMES: ta.Mapping[YamlNodeType, str] = {
    YamlNodeType.UNKNOWN: 'unknown',
    YamlNodeType.DOCUMENT: 'document',
    YamlNodeType.NULL: 'null',
    YamlNodeType.BOOL: 'boolean',
    YamlNodeType.INTEGER: 'int',
    YamlNodeType.FLOAT: 'float',
    YamlNodeType.INFINITY: 'inf',
    YamlNodeType.NAN: 'nan',
    YamlNodeType.STRING: 'string',
    YamlNodeType.MERGE_KEY: 'merge key',
    YamlNodeType.LITERAL: 'scalar',
    YamlNodeType.MAPPING: 'mapping',
    YamlNodeType.MAPPING_KEY: 'key',
    YamlNodeType.MAPPING_VALUE: 'value',
    YamlNodeType.SEQUENCE: 'sequence',
    YamlNodeType.SEQUENCE_ENTRY: 'value',
    YamlNodeType.ANCHOR: 'anchor',
    YamlNodeType.ALIAS: 'alias',
    YamlNodeType.DIRECTIVE: 'directive',
    YamlNodeType.TAG: 'tag',
    YamlNodeType.COMMENT: 'comment',
    YamlNodeType.COMMENT_GROUP: 'comment',
}
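
# Illustrative usage sketch (not part of the packaged ast.py): the table above maps each node type to the
# YAML-spec structure name noted in its comment; assuming the module is importable as
# omextra.formats.goyaml.ast, a lookup would look like this:
from omextra.formats.goyaml import ast as goyaml_ast

assert goyaml_ast.YAML_NODE_TYPE_YAML_NAMES[goyaml_ast.YamlNodeType.BOOL] == 'boolean'
assert goyaml_ast.YAML_NODE_TYPE_YAML_NAMES[goyaml_ast.YamlNodeType.LITERAL] == 'scalar'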


##


# Node type of node
class YamlNode(Abstract):
    # io.Reader

    def __str__(self) -> ta.NoReturn:
        raise TypeError

    @abc.abstractmethod
    def string(self) -> str:
        # FIXME: migrate off - ensure all sprintfy things explicitly call .string()
        raise NotImplementedError

    # get_token returns token instance
    @abc.abstractmethod
    def get_token(self) -> ta.Optional[YamlToken]:
        raise NotImplementedError

    # type returns type of node
    @abc.abstractmethod
    def type(self) -> YamlNodeType:
        raise NotImplementedError

    # add_column add column number to child nodes recursively
    @abc.abstractmethod
    def add_column(self, column: int) -> None:
        raise NotImplementedError

    # set_comment set comment token to node
    @abc.abstractmethod
    def set_comment(self, node: ta.Optional['CommentGroupYamlNode']) -> ta.Optional[YamlError]:
        raise NotImplementedError

    # comment returns comment token instance
    @abc.abstractmethod
    def get_comment(self) -> ta.Optional['CommentGroupYamlNode']:
        raise NotImplementedError

    # get_path returns YAMLPath for the current node
    @abc.abstractmethod
    def get_path(self) -> str:
        raise NotImplementedError

    # set_path set YAMLPath for the current node
    @abc.abstractmethod
    def set_path(self, path: str) -> None:
        raise NotImplementedError

    # marshal_yaml
    @abc.abstractmethod
    def marshal_yaml(self) -> YamlErrorOr[str]:
        raise NotImplementedError

    # already read length
    @abc.abstractmethod
    def read_len(self) -> int:
        raise NotImplementedError

    # append read length
    @abc.abstractmethod
    def append_read_len(self, n: int) -> None:
        raise NotImplementedError

    # clean read length
    @abc.abstractmethod
    def clear_len(self) -> None:
        raise NotImplementedError


# MapKeyNode type for map key node
class MapKeyYamlNode(YamlNode, Abstract):
    @abc.abstractmethod
    def is_merge_key(self) -> bool:
        raise NotImplementedError

    # String node to text without comment
    @abc.abstractmethod
    def string_without_comment(self) -> str:
        raise NotImplementedError


# ScalarNode type for scalar node
class ScalarYamlNode(MapKeyYamlNode, Abstract):
    @abc.abstractmethod
    def get_value(self) -> ta.Any:
        raise NotImplementedError


##


@dc.dataclass()
class BaseYamlNode(YamlNode, Abstract):
    path: str = ''
    comment: ta.Optional['CommentGroupYamlNode'] = None
    cur_read: int = 0

    def read_len(self) -> int:
        return self.cur_read

    def clear_len(self) -> None:
        self.cur_read = 0

    def append_read_len(self, l: int) -> None:
        self.cur_read += l

    # get_path returns YAMLPath for the current node.
    @ta.final
    def get_path(self: ta.Optional['BaseYamlNode']) -> str:
        if self is None:
            return ''
        return self.path

    # set_path set YAMLPath for the current node.
    @ta.final
    def set_path(self: ta.Optional['BaseYamlNode'], path: str) -> None:
        if self is None:
            return
        self.path = path

    # get_comment returns comment token instance
    def get_comment(self) -> ta.Optional['CommentGroupYamlNode']:
        return self.comment

    # set_comment set comment token
    def set_comment(self, node: ta.Optional['CommentGroupYamlNode']) -> ta.Optional[YamlError]:
        self.comment = node
        return None


def add_comment_string(base: str, node: 'CommentGroupYamlNode') -> str:
    return f'{base} {node.string()}'


##


def read_node(p: str, node: YamlNode) -> YamlErrorOr[int]:
    s = node.string()
    read_len = node.read_len()
    remain = len(s) - read_len
    if remain == 0:
        node.clear_len()
        return EofYamlError()

    size = min(remain, len(p))
    for idx, b in enumerate(s[read_len:read_len + size]):
        p[idx] = b  # type: ignore[index]  # FIXME: lol

    node.append_read_len(size)
    return size


def check_line_break(t: YamlToken) -> bool:
    if t.prev is not None:
        lbc = '\n'
        prev = t.prev
        adjustment = 0
        # if the previous type is sequence entry use the previous type for that
        if prev.type == YamlTokenType.SEQUENCE_ENTRY:
            # as well as switching to previous type count any new lines in origin to account for:
            # -
            #   b: c
            adjustment = t.origin.rstrip(lbc).count(lbc)
            if prev.prev is not None:
                prev = prev.prev

        line_diff = t.position.line - prev.position.line - 1
        if line_diff > 0:
            if prev.type == YamlTokenType.STRING:
                # Remove any line breaks included in multiline string
                adjustment += prev.origin.strip().rstrip(lbc).count(lbc)

            # Due to the way that comment parsing works its assumed that when a null value does not have new line in
            # origin it was squashed therefore difference is ignored.
            # foo:
            #   bar:
            #     # comment
            #   baz: 1
            # becomes
            # foo:
            #   bar: null # comment
            #
            #   baz: 1
            if prev.type in (YamlTokenType.NULL, YamlTokenType.IMPLICIT_NULL):
                return prev.origin.count(lbc) > 0

            if line_diff - adjustment > 0:
                return True

    return False


##


# Null create node for null value
def null(tk: YamlToken) -> 'NullYamlNode':
    return NullYamlNode(
        token=tk,
    )


_BOOL_TRUE_STRS = {'1', 't', 'T', 'true', 'TRUE', 'True'}
_BOOL_FALSE_STRS = {'0', 'f', 'F', 'false', 'FALSE', 'False'}


def _parse_bool(s: str) -> bool:
    if s in _BOOL_TRUE_STRS:
        return True
    if s in _BOOL_FALSE_STRS:
        return False
    raise ValueError(f'"{s}" is not a valid boolean string')
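
# Illustrative sketch (not part of the packaged file): _parse_bool accepts only the spellings listed in
# _BOOL_TRUE_STRS / _BOOL_FALSE_STRS above (the same set Go's strconv.ParseBool accepts), so YAML 1.1-style
# booleans such as 'yes'/'no' raise ValueError. Assuming the module imports as omextra.formats.goyaml.ast:
from omextra.formats.goyaml.ast import _parse_bool

assert _parse_bool('True') is True
assert _parse_bool('0') is False
try:
    _parse_bool('yes')
except ValueError:
    pass  # 'yes' is in neither set, so it is rejected rather than treated as a boolean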


# bool_ create node for boolean value
def bool_(tk: YamlToken) -> 'BoolYamlNode':
    b = _parse_bool(tk.value)
    return BoolYamlNode(
        token=tk,
        value=b,
    )


# integer create node for integer value
def integer(tk: YamlToken) -> 'IntegerYamlNode':
    v: ta.Any = None
    if (num := tokens.to_number(tk.value)) is not None:
        v = num.value

    return IntegerYamlNode(
        token=tk,
        value=v,
    )


# float_ create node for float value
def float_(tk: YamlToken) -> 'FloatYamlNode':
    v: float = 0.
    if (num := tokens.to_number(tk.value)) is not None and num.type == YamlNumberType.FLOAT:
        if isinstance(num.value, float):
            v = num.value

    return FloatYamlNode(
        token=tk,
        value=v,
    )


# infinity create node for .inf or -.inf value
def infinity(tk: YamlToken) -> 'InfinityYamlNode':
    if tk.value in ('.inf', '.Inf', '.INF'):
        value = float('inf')
    elif tk.value in ('-.inf', '-.Inf', '-.INF'):
        value = float('-inf')
    node = InfinityYamlNode(
        token=tk,
        value=value,
    )
    return node


# nan create node for .nan value
def nan(tk: YamlToken) -> 'NanYamlNode':
    return NanYamlNode(
        token=tk,
    )


# string create node for string value
def string(tk: YamlToken) -> 'StringYamlNode':
    return StringYamlNode(
        token=tk,
        value=tk.value,
    )


# comment create node for comment
def comment(tk: ta.Optional[YamlToken]) -> 'CommentYamlNode':
    return CommentYamlNode(
        token=tk,
    )


def comment_group(comments: ta.Iterable[ta.Optional[YamlToken]]) -> 'CommentGroupYamlNode':
    nodes: ta.List[CommentYamlNode] = []
    for c in comments:
        nodes.append(comment(c))

    return CommentGroupYamlNode(
        comments=nodes,
    )


# merge_key create node for merge key ( << )
def merge_key(tk: YamlToken) -> 'MergeKeyYamlNode':
    return MergeKeyYamlNode(
        token=tk,
    )


# mapping create node for map
def mapping(tk: YamlToken, is_flow_style: bool, *values: 'MappingValueYamlNode') -> 'MappingYamlNode':
    node = MappingYamlNode(
        start=tk,
        is_flow_style=is_flow_style,
        values=[],
    )
    node.values.extend(values)
    return node


# mapping_value create node for mapping value
def mapping_value(tk: YamlToken, key: 'MapKeyYamlNode', value: YamlNode) -> 'MappingValueYamlNode':
    return MappingValueYamlNode(
        start=tk,
        key=key,
        value=value,
    )


# mapping_key create node for map key ( '?' ).
def mapping_key(tk: YamlToken) -> 'MappingKeyYamlNode':
    return MappingKeyYamlNode(
        start=tk,
    )


# sequence create node for sequence
def sequence(tk: YamlToken, is_flow_style: bool) -> 'SequenceYamlNode':
    return SequenceYamlNode(
        start=tk,
        is_flow_style=is_flow_style,
        values=[],
    )


def anchor(tk: YamlToken) -> 'AnchorYamlNode':
    return AnchorYamlNode(
        start=tk,
    )


def alias(tk: YamlToken) -> 'AliasYamlNode':
    return AliasYamlNode(
        start=tk,
    )


def document(tk: ta.Optional[YamlToken], body: ta.Optional[YamlNode]) -> 'DocumentYamlNode':
    return DocumentYamlNode(
        start=tk,
        body=body,
    )


def directive(tk: YamlToken) -> 'DirectiveYamlNode':
    return DirectiveYamlNode(
        start=tk,
    )


def literal(tk: YamlToken) -> 'LiteralYamlNode':
    return LiteralYamlNode(
        start=tk,
    )


def tag(tk: YamlToken) -> 'TagYamlNode':
    return TagYamlNode(
        start=tk,
    )


##


# File contains all documents in YAML file
@dc.dataclass()
class YamlFile:
    name: str = ''
    docs: ta.List['DocumentYamlNode'] = dc.field(default_factory=dataclass_field_required('docs'))

    # read implements (io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        for doc in self.docs:
            n = doc.read(p)
            if isinstance(n, EofYamlError):
                continue
            return n
        return EofYamlError()

    # string all documents to text
    def string(self) -> str:
        docs: ta.List[str] = []
        for doc in self.docs:
            docs.append(doc.string())
        if len(docs) > 0:
            return '\n'.join(docs) + '\n'
        else:
            return ''


##


# DocumentNode type of Document
@dc.dataclass()
class DocumentYamlNode(BaseYamlNode):
    start: ta.Optional[YamlToken] = dc.field(default_factory=dataclass_field_required('start'))  # position of DocumentHeader ( `---` )  # noqa
    end: ta.Optional[YamlToken] = None  # position of DocumentEnd ( `...` )
    body: ta.Optional[YamlNode] = dc.field(default_factory=dataclass_field_required('body'))

    # read implements (io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns DocumentNodeType
    def type(self) -> YamlNodeType:
        return YamlNodeType.DOCUMENT

    # get_token returns token instance
    def get_token(self) -> ta.Optional[YamlToken]:
        return check.not_none(self.body).get_token()

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        if self.body is not None:
            self.body.add_column(col)

    # string document to text
    def string(self) -> str:
        doc: ta.List[str] = []
        if self.start is not None:
            doc.append(self.start.value)
        if self.body is not None:
            doc.append(self.body.string())
        if self.end is not None:
            doc.append(self.end.value)
        return '\n'.join(doc)

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()


##


# NullNode type of null node
@dc.dataclass()
class NullYamlNode(ScalarYamlNode, BaseYamlNode):
    token: YamlToken = dc.field(default_factory=dataclass_field_required('token'))

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns NullType
    def type(self) -> YamlNodeType:
        return YamlNodeType.NULL

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.token

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.token, col)

    # get_value returns nil value
    def get_value(self) -> ta.Any:
        return None

    # String returns `null` text
    def string(self) -> str:
        if self.token.type == YamlTokenType.IMPLICIT_NULL:
            if self.comment is not None:
                return self.comment.string()
            return ''
        if self.comment is not None:
            return add_comment_string('null', self.comment)
        return self.string_without_comment()

    def string_without_comment(self) -> str:
        return 'null'

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return False


##


# IntegerNode type of integer node
@dc.dataclass()
class IntegerYamlNode(ScalarYamlNode, BaseYamlNode):
    token: YamlToken = dc.field(default_factory=dataclass_field_required('token'))
    value: ta.Any = dc.field(default_factory=dataclass_field_required('value'))  # int64 or uint64 value

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns IntegerType
    def type(self) -> YamlNodeType:
        return YamlNodeType.INTEGER

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.token

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.token, col)

    # get_value returns int64 value
    def get_value(self) -> ta.Any:
        return self.value

    # String int64 to text
    def string(self) -> str:
        if self.comment is not None:
            return add_comment_string(self.token.value, self.comment)
        return self.string_without_comment()

    def string_without_comment(self) -> str:
        return self.token.value

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return False


##


# FloatNode type of float node
@dc.dataclass()
class FloatYamlNode(ScalarYamlNode, BaseYamlNode):
    token: YamlToken = dc.field(default_factory=dataclass_field_required('token'))
    precision: int = 0
    value: float = dc.field(default_factory=dataclass_field_required('value'))

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns FloatType
    def type(self) -> YamlNodeType:
        return YamlNodeType.FLOAT

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.token

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.token, col)

    # get_value returns float64 value
    def get_value(self) -> ta.Any:
        return self.value

    # String float64 to text
    def string(self) -> str:
        if self.comment is not None:
            return add_comment_string(self.token.value, self.comment)
        return self.string_without_comment()

    def string_without_comment(self) -> str:
        return self.token.value

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return False


##


def _go_is_print(char_ord):
    """
    Approximates Go's unicode.IsPrint logic. A rune is printable if it is a letter, mark, number, punctuation, symbol,
    or ASCII space. (Corresponds to Unicode categories L, M, N, P, S, plus U+0020 SPACE).
    """

    if char_ord == 0x20:  # ASCII space
        return True
    # Check if the character is in categories L, M, N, P, S (Graphic characters)
    category = unicodedata.category(chr(char_ord))
    if category.startswith(('L', 'M', 'N', 'P', 'S')):
        return True
    return False
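
# Illustrative sketch (not part of the packaged file): _go_is_print treats graphic characters and the ASCII
# space as printable, and control characters as not. Assuming the module imports as omextra.formats.goyaml.ast:
from omextra.formats.goyaml.ast import _go_is_print

assert _go_is_print(ord('A'))   # letter (Unicode category Lu)
assert _go_is_print(0x20)       # ASCII space is special-cased
assert not _go_is_print(0x07)   # BEL, a C0 control, is not printable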


def strconv_quote(s: str) -> str:
    """
    Produces a double-quoted string literal with Go-style escapes, similar to Go's strconv.Quote.
    """

    res = ['"']
    for char_val in s:
        char_ord = ord(char_val)

        if char_val == '"':
            res.append('\\"')
        elif char_val == '\\':
            res.append('\\\\')
        elif char_val == '\a':
            res.append('\\a')
        elif char_val == '\b':
            res.append('\\b')
        elif char_val == '\f':
            res.append('\\f')
        elif char_val == '\n':
            res.append('\\n')
        elif char_val == '\r':
            res.append('\\r')
        elif char_val == '\t':
            res.append('\\t')
        elif char_val == '\v':
            res.append('\\v')
        elif char_ord < 0x20 or char_ord == 0x7F:  # C0 controls and DEL
            res.append(f'\\x{char_ord:02x}')
        elif 0x20 <= char_ord < 0x7F:  # Printable ASCII (already handled \, ")
            res.append(char_val)
        # Unicode characters (char_ord >= 0x80) and C1 controls (0x80-0x9F)
        elif _go_is_print(char_ord):
            res.append(char_val)
        elif char_ord <= 0xFFFF:
            res.append(f'\\u{char_ord:04x}')
        else:
            res.append(f'\\U{char_ord:08x}')

    res.append('"')
    return ''.join(res)
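
# Illustrative sketch (not part of the packaged file): strconv_quote builds a Go strconv.Quote-style
# double-quoted literal, escaping quotes, backslashes, and control characters while passing printable
# non-ASCII text through. Assuming the module imports as omextra.formats.goyaml.ast:
from omextra.formats.goyaml.ast import strconv_quote

assert strconv_quote('a"b\nc') == '"a\\"b\\nc"'
assert strconv_quote('café') == '"café"'    # printable non-ASCII is kept as-is
assert strconv_quote('\x01') == '"\\x01"'   # a C0 control becomes a \xNN escape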


##


# StringNode type of string node
@dc.dataclass()
class StringYamlNode(ScalarYamlNode, BaseYamlNode):
    token: YamlToken = dc.field(default_factory=dataclass_field_required('token'))
    value: str = dc.field(default_factory=dataclass_field_required('value'))

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns StringType
    def type(self) -> YamlNodeType:
        return YamlNodeType.STRING

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.token

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.token, col)

    # get_value returns string value
    def get_value(self) -> ta.Any:
        return self.value

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return False

    # string string value to text with quote or literal header if required
    def string(self) -> str:
        if self.token.type == YamlTokenType.SINGLE_QUOTE:
            quoted = escape_single_quote(self.value)
            if self.comment is not None:
                return add_comment_string(quoted, self.comment)
            return quoted
        elif self.token.type == YamlTokenType.DOUBLE_QUOTE:
            quoted = strconv_quote(self.value)
            if self.comment is not None:
                return add_comment_string(quoted, self.comment)
            return quoted

        lbc = tokens.detect_line_break_char(self.value)
        if lbc in self.value:
            # This block assumes that the line breaks in this inside scalar content and the Outside scalar content are
            # the same. It works mostly, but inconsistencies occur if line break characters are mixed.
            header = tokens.literal_block_header(self.value)
            space = ' ' * (self.token.position.column - 1)
            indent = ' ' * self.token.position.indent_num
            values: ta.List[str] = []
            for v in self.value.split(lbc):
                values.append(f'{space}{indent}{v}')
            block = lbc.join(values).rstrip(f'{lbc}{indent}{space}').rstrip(f'{indent}{space}')
            return f'{header}{lbc}{block}'
        elif len(self.value) > 0 and (self.value[0] == '{' or self.value[0] == '['):
            return f"'{self.value}'"
        if self.comment is not None:
            return add_comment_string(self.value, self.comment)
        return self.value

    def string_without_comment(self) -> str:
        if self.token.type == YamlTokenType.SINGLE_QUOTE:
            quoted = f"'{self.value}'"
            return quoted
        elif self.token.type == YamlTokenType.DOUBLE_QUOTE:
            quoted = strconv_quote(self.value)
            return quoted

        lbc = tokens.detect_line_break_char(self.value)
        if lbc in self.value:
            # This block assumes that the line breaks in this inside scalar content and the Outside scalar content are
            # the same. It works mostly, but inconsistencies occur if line break characters are mixed.
            header = tokens.literal_block_header(self.value)
            space = ' ' * (self.token.position.column - 1)
            indent = ' ' * self.token.position.indent_num
            values: ta.List[str] = []
            for v in self.value.split(lbc):
                values.append(f'{space}{indent}{v}')
            block = lbc.join(values).rstrip(f'{lbc}{indent}{space}').rstrip(f' {space}')
            return f'{header}{lbc}{block}'
        elif len(self.value) > 0 and (self.value[0] == '{' or self.value[0] == '['):
            return f"'{self.value}'"
        return self.value

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()


# escape_single_quote escapes s to a single quoted scalar.
# https://yaml.org/spec/1.2.2/#732-single-quoted-style
def escape_single_quote(s: str) -> str:
    sb = io.StringIO()
    # growLen = len(s) +  # s includes also one ' from the doubled pair
    #     2 +  # opening and closing '
    #     strings.Count(s, "'")  # ' added by ReplaceAll
    # sb.Grow(growLen)
    sb.write("'")
    sb.write(s.replace("'", "''"))
    sb.write("'")
    return sb.getvalue()
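
# Illustrative sketch (not part of the packaged file): per the single-quoted style linked above, the only
# escape needed is doubling the quote character itself. Assuming the module imports as omextra.formats.goyaml.ast:
from omextra.formats.goyaml.ast import escape_single_quote

assert escape_single_quote("it's") == "'it''s'"
assert escape_single_quote('plain') == "'plain'"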


##


# LiteralNode type of literal node
@dc.dataclass()
class LiteralYamlNode(ScalarYamlNode, BaseYamlNode):
    start: YamlToken = dc.field(default_factory=dataclass_field_required('start'))
    value: ta.Optional['StringYamlNode'] = None

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns LiteralType
    def type(self) -> YamlNodeType:
        return YamlNodeType.LITERAL

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.start

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.start, col)
        if self.value is not None:
            self.value.add_column(col)

    # get_value returns string value
    def get_value(self) -> ta.Any:
        return self.string()

    # String literal to text
    def string(self) -> str:
        origin = check.not_none(check.not_none(self.value).get_token()).origin
        lit = origin.rstrip(' ').rstrip('\n')
        if self.comment is not None:
            return f'{self.start.value} {self.comment.string()}\n{lit}'
        return f'{self.start.value}\n{lit}'

    def string_without_comment(self) -> str:
        return self.string()

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return False


##


# MergeKeyNode type of merge key node
@dc.dataclass()
class MergeKeyYamlNode(ScalarYamlNode, BaseYamlNode):
    token: YamlToken = dc.field(default_factory=dataclass_field_required('token'))

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns MergeKeyType
    def type(self) -> YamlNodeType:
        return YamlNodeType.MERGE_KEY

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.token

    # get_value returns '<<' value
    def get_value(self) -> ta.Any:
        return self.token.value

    # String returns '<<' value
    def string(self) -> str:
        return self.string_without_comment()

    def string_without_comment(self) -> str:
        return self.token.value

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.token, col)

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return str(self)

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return True


##


# BoolNode type of boolean node
@dc.dataclass()
class BoolYamlNode(ScalarYamlNode, BaseYamlNode):
    token: YamlToken = dc.field(default_factory=dataclass_field_required('token'))
    value: bool = dc.field(default_factory=dataclass_field_required('value'))

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns BoolType
    def type(self) -> YamlNodeType:
        return YamlNodeType.BOOL

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.token

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.token, col)

    # get_value returns boolean value
    def get_value(self) -> ta.Any:
        return self.value

    # String boolean to text
    def string(self) -> str:
        if self.comment is not None:
            return add_comment_string(self.token.value, self.comment)
        return self.string_without_comment()

    def string_without_comment(self) -> str:
        return self.token.value

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return False


##


# InfinityNode type of infinity node
@dc.dataclass()
class InfinityYamlNode(ScalarYamlNode, BaseYamlNode):
    token: YamlToken = dc.field(default_factory=dataclass_field_required('token'))
    value: float = dc.field(default_factory=dataclass_field_required('value'))

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns InfinityType
    def type(self) -> YamlNodeType:
        return YamlNodeType.INFINITY

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.token

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.token, col)

    # get_value returns math.Inf(0) or math.Inf(-1)
    def get_value(self) -> ta.Any:
        return self.value

    # String infinity to text
    def string(self) -> str:
        if self.comment is not None:
            return add_comment_string(self.token.value, self.comment)
        return self.string_without_comment()

    def string_without_comment(self) -> str:
        return self.token.value

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return False


##


# NanNode type of nan node
@dc.dataclass()
class NanYamlNode(ScalarYamlNode, BaseYamlNode):
    token: YamlToken = dc.field(default_factory=dataclass_field_required('token'))

    # read implements(io.Reader).Read
    def read(self, p: str) -> YamlErrorOr[int]:
        return read_node(p, self)

    # type returns NanType
    def type(self) -> YamlNodeType:
        return YamlNodeType.NAN

    # get_token returns token instance
    def get_token(self) -> YamlToken:
        return self.token

    # add_column add column number to child nodes recursively
    def add_column(self, col: int) -> None:
        YamlToken.add_column(self.token, col)

    # get_value returns math.NaN()
    def get_value(self) -> ta.Any:
        return float('nan')

    # String returns .nan
    def string(self) -> str:
        if self.comment is not None:
            return add_comment_string(self.token.value, self.comment)
        return self.string_without_comment()

    def string_without_comment(self) -> str:
        return self.token.value

    # marshal_yaml encodes to a YAML text
    def marshal_yaml(self) -> YamlErrorOr[str]:
        return self.string()

    # is_merge_key returns whether it is a MergeKey node.
    def is_merge_key(self) -> bool:
        return False


##


# MapNode interface of MappingValueNode / MappingNode
class MapYamlNode(Abstract):
    @abc.abstractmethod
    def map_range(self) -> 'MapYamlNodeIter':
        raise NotImplementedError


START_RANGE_INDEX = -1


# MapNodeIter is an iterator for ranging over a MapNode
@dc.dataclass()
class MapYamlNodeIter:
    values: ta.List['MappingValueYamlNode']
    idx: int

    # next advances the map iterator and reports whether there is another entry.
    # It returns false when the iterator is exhausted.
    def next(self) -> bool:
        self.idx += 1
        nxt = self.idx < len(self.values)
        return nxt

    # key returns the key of the iterator's current map node entry.
    def key(self) -> MapKeyYamlNode:
        return self.values[self.idx].key

    # value returns the value of the iterator's current map node entry.
    def value(self) -> YamlNode:
        return self.values[self.idx].value

    # key_value returns the MappingValueNode of the iterator's current map node entry.
    def key_value(self) -> 'MappingValueYamlNode':
        return self.values[self.idx]
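
# Illustrative sketch (not part of the packaged file): MapYamlNodeIter follows the Go iterator idiom - idx
# starts at START_RANGE_INDEX (-1) and next() advances before each access. Assuming the module imports as
# omextra.formats.goyaml.ast; real iterators come from a node's map_range(), an empty one is shown for brevity:
from omextra.formats.goyaml.ast import MapYamlNodeIter, START_RANGE_INDEX

it = MapYamlNodeIter(values=[], idx=START_RANGE_INDEX)
while it.next():  # immediately False for an empty iterator
    print(it.key().string(), '->', it.value().string())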
|
|
1140
|
+
|
|
1141
|
+
|
|
1142
|
+
#
|
|
1143
|
+
|
|
1144
|
+
|
|
1145
|
+
# MappingNode type of mapping node
|
|
1146
|
+
@dc.dataclass()
|
|
1147
|
+
class MappingYamlNode(BaseYamlNode):
|
|
1148
|
+
start: YamlToken = dc.field(default_factory=dataclass_field_required('start'))
|
|
1149
|
+
end: ta.Optional[YamlToken] = None
|
|
1150
|
+
is_flow_style: bool = dc.field(default_factory=dataclass_field_required('is_flow_style'))
|
|
1151
|
+
values: ta.List['MappingValueYamlNode'] = dc.field(default_factory=dataclass_field_required('values'))
|
|
1152
|
+
foot_comment: ta.Optional['CommentGroupYamlNode'] = None
|
|
1153
|
+
|
|
1154
|
+
def start_pos(self) -> YamlPosition:
|
|
1155
|
+
if len(self.values) == 0:
|
|
1156
|
+
return self.start.position
|
|
1157
|
+
return check.not_none(self.values[0].key.get_token()).position
|
|
1158
|
+
|
|
1159
|
+
# merge merge key/value of map.
|
|
1160
|
+
def merge(self, target: 'MappingYamlNode') -> None:
|
|
1161
|
+
key_to_map_value_map: ta.Dict[str, MappingValueYamlNode] = {}
|
|
1162
|
+
for value in self.values:
|
|
1163
|
+
key = value.key.string()
|
|
1164
|
+
key_to_map_value_map[key] = value
|
|
1165
|
+
column = self.start_pos().column - target.start_pos().column
|
|
1166
|
+
target.add_column(column)
|
|
1167
|
+
for value in target.values:
|
|
1168
|
+
map_value = key_to_map_value_map.get(value.key.string())
|
|
1169
|
+
if map_value is not None:
|
|
1170
|
+
map_value.value = value.value
|
|
1171
|
+
else:
|
|
1172
|
+
self.values.append(value)
|
|
1173
|
+
|
|
1174
|
+
# set_is_flow_style set value to is_flow_style field recursively.
|
|
1175
|
+
def set_is_flow_style(self, is_flow: bool) -> None:
|
|
1176
|
+
self.is_flow_style = is_flow
|
|
1177
|
+
for value in self.values:
|
|
1178
|
+
value.set_is_flow_style(is_flow)
|
|
1179
|
+
|
|
1180
|
+
# read implements(io.Reader).Read
|
|
1181
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1182
|
+
return read_node(p, self)
|
|
1183
|
+
|
|
1184
|
+
# type returns MappingType
|
|
1185
|
+
def type(self) -> YamlNodeType:
|
|
1186
|
+
return YamlNodeType.MAPPING
|
|
1187
|
+
|
|
1188
|
+
# get_token returns token instance
|
|
1189
|
+
def get_token(self) -> YamlToken:
|
|
1190
|
+
return self.start
|
|
1191
|
+
|
|
1192
|
+
# add_column add column number to child nodes recursively
|
|
1193
|
+
def add_column(self, col: int) -> None:
|
|
1194
|
+
YamlToken.add_column(self.start, col)
|
|
1195
|
+
YamlToken.add_column(self.end, col)
|
|
1196
|
+
for value in self.values:
|
|
1197
|
+
value.add_column(col)
|
|
1198
|
+
|
|
1199
|
+
def flow_style_string(self, comment_mode: bool) -> str:
|
|
1200
|
+
values: ta.List[str] = []
|
|
1201
|
+
for value in self.values:
|
|
1202
|
+
values.append(value.string().lstrip(' '))
|
|
1203
|
+
map_text = f'{{{", ".join(values)}}}'
|
|
1204
|
+
if comment_mode and self.comment is not None:
|
|
1205
|
+
return add_comment_string(map_text, self.comment)
|
|
1206
|
+
return map_text
|
|
1207
|
+
|
|
1208
|
+
def block_style_string(self, comment_mode: bool) -> str:
|
|
1209
|
+
values: ta.List[str] = []
|
|
1210
|
+
for value0 in self.values:
|
|
1211
|
+
values.append(value0.string())
|
|
1212
|
+
map_text = '\n'.join(values)
|
|
1213
|
+
if comment_mode and self.comment is not None:
|
|
1214
|
+
value1 = values[0]
|
|
1215
|
+
space_num = 0
|
|
1216
|
+
for i in range(len(value1)):
|
|
1217
|
+
if value1[i] != ' ':
|
|
1218
|
+
break
|
|
1219
|
+
space_num += 1
|
|
1220
|
+
comment = self.comment.string_with_space(space_num)
|
|
1221
|
+
return f'{comment}\n{map_text}'
|
|
1222
|
+
return map_text
|
|
1223
|
+
|
|
1224
|
+
# String mapping values to text
|
|
1225
|
+
def string(self) -> str:
|
|
1226
|
+
if len(self.values) == 0:
|
|
1227
|
+
if self.comment is not None:
|
|
1228
|
+
return add_comment_string('{}', self.comment)
|
|
1229
|
+
return '{}'
|
|
1230
|
+
|
|
1231
|
+
comment_mode = True
|
|
1232
|
+
if self.is_flow_style or len(self.values) == 0:
|
|
1233
|
+
return self.flow_style_string(comment_mode)
|
|
1234
|
+
|
|
1235
|
+
return self.block_style_string(comment_mode)
|
|
1236
|
+
|
|
1237
|
+
# map_range implements MapNode protocol
|
|
1238
|
+
def map_range(self) -> MapYamlNodeIter:
|
|
1239
|
+
return MapYamlNodeIter(
|
|
1240
|
+
idx=START_RANGE_INDEX,
|
|
1241
|
+
values=self.values,
|
|
1242
|
+
)
|
|
1243
|
+
|
|
1244
|
+
# marshal_yaml encodes to a YAML text
|
|
1245
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1246
|
+
return self.string()
|
|
1247
|
+
|
|
1248
|
+
|
|
1249
|
+
##
|
|
1250
|
+
|
|
1251
|
+
|
|
1252
|
+
# MappingKeyNode type of tag node
|
|
1253
|
+
@dc.dataclass()
|
|
1254
|
+
class MappingKeyYamlNode(MapKeyYamlNode, BaseYamlNode):
|
|
1255
|
+
start: YamlToken = dc.field(default_factory=dataclass_field_required('start'))
|
|
1256
|
+
value: ta.Optional[YamlNode] = None
|
|
1257
|
+
|
|
1258
|
+
# read implements(io.Reader).Read
|
|
1259
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1260
|
+
return read_node(p, self)
|
|
1261
|
+
|
|
1262
|
+
# type returns MappingKeyType
|
|
1263
|
+
def type(self) -> YamlNodeType:
|
|
1264
|
+
return YamlNodeType.MAPPING_KEY
|
|
1265
|
+
|
|
1266
|
+
# get_token returns token instance
|
|
1267
|
+
def get_token(self) -> YamlToken:
|
|
1268
|
+
return self.start
|
|
1269
|
+
|
|
1270
|
+
# add_column add column number to child nodes recursively
|
|
1271
|
+
def add_column(self, col: int) -> None:
|
|
1272
|
+
YamlToken.add_column(self.start, col)
|
|
1273
|
+
if self.value is not None:
|
|
1274
|
+
self.value.add_column(col)
|
|
1275
|
+
|
|
1276
|
+
# String tag to text
|
|
1277
|
+
def string(self) -> str:
|
|
1278
|
+
return self.string_without_comment()
|
|
1279
|
+
|
|
1280
|
+
def string_without_comment(self) -> str:
|
|
1281
|
+
return f'{self.start.value} {check.not_none(self.value).string()}'
|
|
1282
|
+
|
|
1283
|
+
# marshal_yaml encodes to a YAML text
|
|
1284
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1285
|
+
return self.string()
|
|
1286
|
+
|
|
1287
|
+
# is_merge_key returns whether it is a MergeKey node.
|
|
1288
|
+
def is_merge_key(self) -> bool:
|
|
1289
|
+
if self.value is None:
|
|
1290
|
+
return False
|
|
1291
|
+
key = self.value
|
|
1292
|
+
if not isinstance(key, MapKeyYamlNode):
|
|
1293
|
+
return False
|
|
1294
|
+
return key.is_merge_key()
|
|
1295
|
+
|
|
1296
|
+
|
|
1297
|
+
##
|
|
1298
|
+
|
|
1299
|
+
|
|
1300
|
+
# MappingValueNode type of mapping value
|
|
1301
|
+
@dc.dataclass()
|
|
1302
|
+
class MappingValueYamlNode(BaseYamlNode):
|
|
1303
|
+
start: YamlToken = dc.field(default_factory=dataclass_field_required('start')) # delimiter token ':'.
|
|
1304
|
+
collect_entry: ta.Optional[YamlToken] = None # collect entry token ','.
|
|
1305
|
+
key: MapKeyYamlNode = dc.field(default_factory=dataclass_field_required('key'))
|
|
1306
|
+
value: YamlNode = dc.field(default_factory=dataclass_field_required('value'))
|
|
1307
|
+
foot_comment: ta.Optional['CommentGroupYamlNode'] = None
|
|
1308
|
+
is_flow_style: bool = False
|
|
1309
|
+
|
|
1310
|
+
# Replace replace value node.
|
|
1311
|
+
def replace(self, value: YamlNode) -> ta.Optional[YamlError]:
|
|
1312
|
+
column = check.not_none(self.value.get_token()).position.column - check.not_none(value.get_token()).position.column # noqa
|
|
1313
|
+
value.add_column(column)
|
|
1314
|
+
self.value = value
|
|
1315
|
+
return None
|
|
1316
|
+
|
|
1317
|
+
# read implements(io.Reader).Read
|
|
1318
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1319
|
+
return read_node(p, self)
|
|
1320
|
+
|
|
1321
|
+
# type returns MappingValueType
|
|
1322
|
+
def type(self) -> YamlNodeType:
|
|
1323
|
+
return YamlNodeType.MAPPING_VALUE
|
|
1324
|
+
|
|
1325
|
+
# get_token returns token instance
|
|
1326
|
+
def get_token(self) -> YamlToken:
|
|
1327
|
+
return self.start
|
|
1328
|
+
|
|
1329
|
+
# add_column add column number to child nodes recursively
|
|
1330
|
+
def add_column(self, col: int) -> None:
|
|
1331
|
+
YamlToken.add_column(self.start, col)
|
|
1332
|
+
if self.key is not None:
|
|
1333
|
+
self.key.add_column(col)
|
|
1334
|
+
if self.value is not None:
|
|
1335
|
+
self.value.add_column(col)
|
|
1336
|
+
|
|
1337
|
+
# set_is_flow_style set value to is_flow_style field recursively.
|
|
1338
|
+
def set_is_flow_style(self, is_flow: bool) -> None:
|
|
1339
|
+
self.is_flow_style = is_flow
|
|
1340
|
+
if isinstance(self.value, MappingYamlNode):
|
|
1341
|
+
self.value.set_is_flow_style(is_flow)
|
|
1342
|
+
elif isinstance(self.value, MappingValueYamlNode):
|
|
1343
|
+
self.value.set_is_flow_style(is_flow)
|
|
1344
|
+
elif isinstance(self.value, SequenceYamlNode):
|
|
1345
|
+
self.value.set_is_flow_style(is_flow)
|
|
1346
|
+
|
|
1347
|
+
# String mapping value to text
|
|
1348
|
+
def string(self) -> str:
|
|
1349
|
+
text: str
|
|
1350
|
+
if self.comment is not None:
|
|
1351
|
+
text = f'{self.comment.string_with_space(check.not_none(self.key.get_token()).position.column - 1)}\n{self.to_string()}' # noqa
|
|
1352
|
+
else:
|
|
1353
|
+
text = self.to_string()
|
|
1354
|
+
|
|
1355
|
+
if self.foot_comment is not None:
|
|
1356
|
+
text += f'\n{self.foot_comment.string_with_space(check.not_none(self.key.get_token()).position.column - 1)}'
|
|
1357
|
+
|
|
1358
|
+
return text
|
|
1359
|
+
|
|
1360
|
+
def to_string(self) -> str:
|
|
1361
|
+
space = ' ' * (check.not_none(self.key.get_token()).position.column - 1)
|
|
1362
|
+
if check_line_break(check.not_none(self.key.get_token())):
|
|
1363
|
+
space = f'\n{space}'
|
|
1364
|
+
|
|
1365
|
+
key_indent_level = check.not_none(self.key.get_token()).position.indent_level
|
|
1366
|
+
value_indent_level = check.not_none(self.value.get_token()).position.indent_level
|
|
1367
|
+
key_comment = self.key.get_comment()
|
|
1368
|
+
|
|
1369
|
+
if isinstance(self.value, ScalarYamlNode):
|
|
1370
|
+
value = self.value.string()
|
|
1371
|
+
if value == '':
|
|
1372
|
+
# implicit null value.
|
|
1373
|
+
return f'{space}{self.key.string()}:'
|
|
1374
|
+
return f'{space}{self.key.string()}: {value}'
|
|
1375
|
+
|
|
1376
|
+
elif key_indent_level < value_indent_level and not self.is_flow_style:
|
|
1377
|
+
if key_comment is not None:
|
|
1378
|
+
return f'{space}{self.key.string_without_comment()}: {key_comment.string()}\n{self.value.string()}'
|
|
1379
|
+
|
|
1380
|
+
return f'{space}{self.key.string()}:\n{self.value.string()}'
|
|
1381
|
+
|
|
1382
|
+
elif isinstance(self.value, MappingYamlNode) and (self.value.is_flow_style or len(self.value.values) == 0):
|
|
1383
|
+
return f'{space}{self.key.string()}: {self.value.string()}'
|
|
1384
|
+
|
|
1385
|
+
elif isinstance(self.value, SequenceYamlNode) and (self.value.is_flow_style or len(self.value.values) == 0):
|
|
1386
|
+
return f'{space}{self.key.string()}: {self.value.string()}'
|
|
1387
|
+
|
|
1388
|
+
elif isinstance(self.value, AnchorYamlNode):
|
|
1389
|
+
return f'{space}{self.key.string()}: {self.value.string()}'
|
|
1390
|
+
|
|
1391
|
+
elif isinstance(self.value, AliasYamlNode):
|
|
1392
|
+
return f'{space}{self.key.string()}: {self.value.string()}'
|
|
1393
|
+
|
|
1394
|
+
elif isinstance(self.value, TagYamlNode):
|
|
1395
|
+
return f'{space}{self.key.string()}: {self.value.string()}'
|
|
1396
|
+
|
|
1397
|
+
if key_comment is not None:
|
|
1398
|
+
return f'{space}{self.key.string_without_comment()}: {key_comment.string()}\n{self.value.string()}'
|
|
1399
|
+
|
|
1400
|
+
if isinstance(self.value, MappingYamlNode) and self.value.comment is not None:
|
|
1401
|
+
return f'{space}{self.key.string()}: {self.value.string().lstrip(" ")}'
|
|
1402
|
+
|
|
1403
|
+
return f'{space}{self.key.string()}:\n{self.value.string()}'
|
|
1404
|
+
|
|
1405
|
+
# map_range implements MapNode protocol
|
|
1406
|
+
def map_range(self) -> MapYamlNodeIter:
|
|
1407
|
+
return MapYamlNodeIter(
|
|
1408
|
+
idx=START_RANGE_INDEX,
|
|
1409
|
+
values=[self],
|
|
1410
|
+
)
|
|
1411
|
+
|
|
1412
|
+
# marshal_yaml encodes to a YAML text
|
|
1413
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1414
|
+
return self.string()
|
|
1415
|
+
|
|
1416
|
+
|
|
1417
|
+
##
|
|
1418
|
+
|
|
1419
|
+
|
|
1420
|
+
# ArrayNode interface of SequenceNode
|
|
1421
|
+
class ArrayYamlNode(YamlNode, Abstract):
|
|
1422
|
+
@abc.abstractmethod
|
|
1423
|
+
def array_range(self) -> ta.Optional['ArrayYamlNodeIter']:
|
|
1424
|
+
raise NotImplementedError
|
|
1425
|
+
|
|
1426
|
+
|
|
1427
|
+
# ArrayNodeIter is an iterator for ranging over a ArrayNode
|
|
1428
|
+
@dc.dataclass()
|
|
1429
|
+
class ArrayYamlNodeIter:
|
|
1430
|
+
values: ta.List[YamlNode]
|
|
1431
|
+
idx: int
|
|
1432
|
+
|
|
1433
|
+
# next advances the array iterator and reports whether there is another entry.
|
|
1434
|
+
# It returns false when the iterator is exhausted.
|
|
1435
|
+
def next(self) -> bool:
|
|
1436
|
+
self.idx += 1
|
|
1437
|
+
nxt = self.idx < len(self.values)
|
|
1438
|
+
return nxt
|
|
1439
|
+
|
|
1440
|
+
# Value returns the value of the iterator's current array entry.
|
|
1441
|
+
def value(self) -> YamlNode:
|
|
1442
|
+
return self.values[self.idx]
|
|
1443
|
+
|
|
1444
|
+
# len returns length of array
|
|
1445
|
+
def len(self) -> int:
|
|
1446
|
+
return len(self.values)
|
|
1447
|
+
|
|
1448
|
+
|
|
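A minimal sketch of the iterator contract documented above: next() advances first and reports whether an entry is available, so the loop below visits every element exactly once. It assumes `seq` is a SequenceYamlNode obtained from this package's parser (array_range() is implemented further down in this file).

it = seq.array_range()           # hypothetical: seq comes from the parser
if it is not None:
    while it.next():             # advance, then read
        print(it.value().string())
    print('length:', it.len())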
1449
|
+
##
|
|
1450
|
+
|
|
1451
|
+
|
|
1452
|
+
# SequenceNode type of sequence node
|
|
1453
|
+
@dc.dataclass()
|
|
1454
|
+
class SequenceYamlNode(BaseYamlNode, ArrayYamlNode):
|
|
1455
|
+
start: YamlToken = dc.field(default_factory=dataclass_field_required('start'))
|
|
1456
|
+
end: ta.Optional[YamlToken] = None
|
|
1457
|
+
is_flow_style: bool = dc.field(default_factory=dataclass_field_required('is_flow_style'))
|
|
1458
|
+
values: ta.List[ta.Optional[YamlNode]] = dc.field(default_factory=dataclass_field_required('values'))
|
|
1459
|
+
value_head_comments: ta.List[ta.Optional['CommentGroupYamlNode']] = dc.field(default_factory=list)
|
|
1460
|
+
entries: ta.List['SequenceEntryYamlNode'] = dc.field(default_factory=list)
|
|
1461
|
+
foot_comment: ta.Optional['CommentGroupYamlNode'] = None
|
|
1462
|
+
|
|
1463
|
+
# replace replaces the value node at the given index.
|
|
1464
|
+
def replace(self, idx: int, value: YamlNode) -> ta.Optional[YamlError]:
|
|
1465
|
+
if len(self.values) <= idx:
|
|
1466
|
+
return yaml_error(f'invalid index for sequence: sequence length is {len(self.values):d}, but specified {idx:d} index') # noqa
|
|
1467
|
+
|
|
1468
|
+
column = check.not_none(check.not_none(self.values[idx]).get_token()).position.column - check.not_none(value.get_token()).position.column # noqa
|
|
1469
|
+
value.add_column(column)
|
|
1470
|
+
self.values[idx] = value
|
|
1471
|
+
return None
|
|
1472
|
+
|
|
1473
|
+
# merge merges the values of another sequence node into this one.
|
|
1474
|
+
def merge(self, target: 'SequenceYamlNode') -> None:
|
|
1475
|
+
column = self.start.position.column - target.start.position.column
|
|
1476
|
+
target.add_column(column)
|
|
1477
|
+
self.values.extend(target.values)
|
|
1478
|
+
if len(target.value_head_comments) == 0:
|
|
1479
|
+
self.value_head_comments.extend([None] * len(target.values))
|
|
1480
|
+
return
|
|
1481
|
+
|
|
1482
|
+
self.value_head_comments.extend(target.value_head_comments)
|
|
1483
|
+
|
|
1484
|
+
# set_is_flow_style sets the is_flow_style field recursively.
|
|
1485
|
+
def set_is_flow_style(self, is_flow: bool) -> None:
|
|
1486
|
+
self.is_flow_style = is_flow
|
|
1487
|
+
for value in self.values:
|
|
1488
|
+
if isinstance(value, MappingYamlNode):
|
|
1489
|
+
value.set_is_flow_style(is_flow)
|
|
1490
|
+
elif isinstance(value, MappingValueYamlNode):
|
|
1491
|
+
value.set_is_flow_style(is_flow)
|
|
1492
|
+
elif isinstance(value, SequenceYamlNode):
|
|
1493
|
+
value.set_is_flow_style(is_flow)
|
|
1494
|
+
|
|
1495
|
+
# read implements(io.Reader).Read
|
|
1496
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1497
|
+
return read_node(p, self)
|
|
1498
|
+
|
|
1499
|
+
# type returns SequenceType
|
|
1500
|
+
def type(self) -> YamlNodeType:
|
|
1501
|
+
return YamlNodeType.SEQUENCE
|
|
1502
|
+
|
|
1503
|
+
# get_token returns token instance
|
|
1504
|
+
def get_token(self) -> YamlToken:
|
|
1505
|
+
return self.start
|
|
1506
|
+
|
|
1507
|
+
# add_column add column number to child nodes recursively
|
|
1508
|
+
def add_column(self, col: int) -> None:
|
|
1509
|
+
YamlToken.add_column(self.start, col)
|
|
1510
|
+
YamlToken.add_column(self.end, col)
|
|
1511
|
+
for value in self.values:
|
|
1512
|
+
check.not_none(value).add_column(col)
|
|
1513
|
+
|
|
1514
|
+
def flow_style_string(self) -> str:
|
|
1515
|
+
values: ta.List[str] = []
|
|
1516
|
+
for value in self.values:
|
|
1517
|
+
values.append(check.not_none(value).string())
|
|
1518
|
+
|
|
1519
|
+
return f'[{", ".join(values)}]'
|
|
1520
|
+
|
|
1521
|
+
def block_style_string(self) -> str:
|
|
1522
|
+
space = ' ' * (self.start.position.column - 1)
|
|
1523
|
+
values: ta.List[str] = []
|
|
1524
|
+
if self.comment is not None:
|
|
1525
|
+
values.append(self.comment.string_with_space(self.start.position.column - 1))
|
|
1526
|
+
|
|
1527
|
+
for idx, value in enumerate(self.values):
|
|
1528
|
+
if value is None:
|
|
1529
|
+
continue
|
|
1530
|
+
|
|
1531
|
+
value_str = value.string()
|
|
1532
|
+
new_line_prefix = ''
|
|
1533
|
+
if value_str.startswith('\n'):
|
|
1534
|
+
value_str = value_str[1:]
|
|
1535
|
+
new_line_prefix = '\n'
|
|
1536
|
+
|
|
1537
|
+
splitted_values = value_str.split('\n')
|
|
1538
|
+
trimmed_first_value = splitted_values[0].lstrip(' ')
|
|
1539
|
+
diff_length = len(splitted_values[0]) - len(trimmed_first_value)
|
|
1540
|
+
if (
|
|
1541
|
+
(len(splitted_values) > 1 and value.type() == YamlNodeType.STRING) or
|
|
1542
|
+
value.type() == YamlNodeType.LITERAL
|
|
1543
|
+
):
|
|
1544
|
+
# If multi-line string, the space characters for indent have already been added, so delete them.
|
|
1545
|
+
prefix = space + ' '
|
|
1546
|
+
for i in range(1, len(splitted_values)):
|
|
1547
|
+
splitted_values[i] = splitted_values[i].lstrip(prefix)
|
|
1548
|
+
|
|
1549
|
+
new_values: ta.List[str] = [trimmed_first_value]
|
|
1550
|
+
for i in range(1, len(splitted_values)):
|
|
1551
|
+
if len(splitted_values[i]) <= diff_length:
|
|
1552
|
+
# this line is \n or white space only
|
|
1553
|
+
new_values.append('')
|
|
1554
|
+
continue
|
|
1555
|
+
|
|
1556
|
+
trimmed = splitted_values[i][diff_length:]
|
|
1557
|
+
new_values.append(f'{space} {trimmed}')
|
|
1558
|
+
|
|
1559
|
+
new_value = '\n'.join(new_values)
|
|
1560
|
+
if len(self.value_head_comments) == len(self.values) and self.value_head_comments[idx] is not None:
|
|
1561
|
+
values.append(
|
|
1562
|
+
f'{new_line_prefix}'
|
|
1563
|
+
f'{check.not_none(self.value_head_comments[idx]).string_with_space(self.start.position.column - 1)}', # noqa
|
|
1564
|
+
)
|
|
1565
|
+
new_line_prefix = ''
|
|
1566
|
+
|
|
1567
|
+
values.append(f'{new_line_prefix}{space}- {new_value}')
|
|
1568
|
+
|
|
1569
|
+
if self.foot_comment is not None:
|
|
1570
|
+
values.append(self.foot_comment.string_with_space(self.start.position.column - 1))
|
|
1571
|
+
|
|
1572
|
+
return '\n'.join(values)
|
|
1573
|
+
|
|
1574
|
+
# String sequence to text
|
|
1575
|
+
def string(self) -> str:
|
|
1576
|
+
if self.is_flow_style or len(self.values) == 0:
|
|
1577
|
+
return self.flow_style_string()
|
|
1578
|
+
return self.block_style_string()
|
|
1579
|
+
|
|
1580
|
+
# array_range implements ArrayNode protocol
|
|
1581
|
+
def array_range(self) -> ta.Optional[ArrayYamlNodeIter]:
|
|
1582
|
+
return ArrayYamlNodeIter(
|
|
1583
|
+
idx=START_RANGE_INDEX,
|
|
1584
|
+
values=ta.cast('ta.List[YamlNode]', self.values),
|
|
1585
|
+
)
|
|
1586
|
+
|
|
1587
|
+
# marshal_yaml encodes to a YAML text
|
|
1588
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1589
|
+
return self.string()
|
|
1590
|
+
|
|
1591
|
+
|
|
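As string() above shows, a sequence renders in flow style when is_flow_style is set (or when it is empty) and in block style otherwise, with head and foot comments re-indented to the sequence's column. A hedged sketch, assuming `seq` is a SequenceYamlNode parsed from the document '- a\n- b':

seq.set_is_flow_style(True)
print(seq.string())     # expected: [a, b]
seq.set_is_flow_style(False)
print(seq.string())     # expected block form:
                        # - a
                        # - b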
1592
|
+
##
|
|
1593
|
+
|
|
1594
|
+
|
|
1595
|
+
# SequenceEntryNode is the sequence entry.
|
|
1596
|
+
@dc.dataclass()
|
|
1597
|
+
class SequenceEntryYamlNode(BaseYamlNode):
|
|
1598
|
+
head_comment: ta.Optional['CommentGroupYamlNode'] = dc.field(default_factory=dataclass_field_required('head_comment')) # head comment. # noqa
|
|
1599
|
+
line_comment: ta.Optional['CommentGroupYamlNode'] = None # line comment e.g.) - # comment.
|
|
1600
|
+
start: ta.Optional[YamlToken] = dc.field(default_factory=dataclass_field_required('start')) # entry token.
|
|
1601
|
+
value: YamlNode = dc.field(default_factory=dataclass_field_required('value')) # value node.
|
|
1602
|
+
|
|
1603
|
+
# String node to text
|
|
1604
|
+
def string(self) -> str:
|
|
1605
|
+
return '' # TODO
|
|
1606
|
+
|
|
1607
|
+
# get_token returns token instance
|
|
1608
|
+
def get_token(self) -> ta.Optional[YamlToken]:
|
|
1609
|
+
return self.start
|
|
1610
|
+
|
|
1611
|
+
# type returns type of node
|
|
1612
|
+
def type(self) -> YamlNodeType:
|
|
1613
|
+
return YamlNodeType.SEQUENCE_ENTRY
|
|
1614
|
+
|
|
1615
|
+
# add_column add column number to child nodes recursively
|
|
1616
|
+
def add_column(self, col: int) -> None:
|
|
1617
|
+
YamlToken.add_column(self.start, col)
|
|
1618
|
+
|
|
1619
|
+
# set_comment set line comment.
|
|
1620
|
+
def set_comment(self, node: ta.Optional['CommentGroupYamlNode']) -> ta.Optional[YamlError]:
|
|
1621
|
+
self.line_comment = node
|
|
1622
|
+
return None
|
|
1623
|
+
|
|
1624
|
+
# comment returns comment token instance
|
|
1625
|
+
def get_comment(self) -> ta.Optional['CommentGroupYamlNode']:
|
|
1626
|
+
return self.line_comment
|
|
1627
|
+
|
|
1628
|
+
# marshal_yaml encodes to a YAML text
|
|
1629
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1630
|
+
return self.string()
|
|
1631
|
+
|
|
1632
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1633
|
+
return read_node(p, self)
|
|
1634
|
+
|
|
1635
|
+
|
|
1636
|
+
# sequence_entry creates SequenceEntryNode instance.
|
|
1637
|
+
def sequence_entry(
|
|
1638
|
+
start: ta.Optional[YamlToken],
|
|
1639
|
+
value: YamlNode,
|
|
1640
|
+
head_comment: ta.Optional['CommentGroupYamlNode'],
|
|
1641
|
+
) -> SequenceEntryYamlNode:
|
|
1642
|
+
return SequenceEntryYamlNode(
|
|
1643
|
+
head_comment=head_comment,
|
|
1644
|
+
start=start,
|
|
1645
|
+
value=value,
|
|
1646
|
+
)
|
|
1647
|
+
|
|
1648
|
+
|
|
1649
|
+
# SequenceMergeValue creates SequenceMergeValueNode instance.
|
|
1650
|
+
def sequence_merge_value(*values: MapYamlNode) -> 'SequenceMergeValueYamlNode':
|
|
1651
|
+
return SequenceMergeValueYamlNode(
|
|
1652
|
+
values=list(values),
|
|
1653
|
+
)
|
|
1654
|
+
|
|
1655
|
+
|
|
1656
|
+
##
|
|
1657
|
+
|
|
1658
|
+
|
|
1659
|
+
# SequenceMergeValueNode is used to convert the Sequence node specified for the merge key into a MapNode format.
|
|
1660
|
+
@dc.dataclass()
|
|
1661
|
+
class SequenceMergeValueYamlNode(MapYamlNode):
|
|
1662
|
+
values: ta.List[MapYamlNode] = dc.field(default_factory=dataclass_field_required('values'))
|
|
1663
|
+
|
|
1664
|
+
# map_range returns MapNodeIter instance.
|
|
1665
|
+
def map_range(self) -> MapYamlNodeIter:
|
|
1666
|
+
ret = MapYamlNodeIter(values=[], idx=START_RANGE_INDEX)
|
|
1667
|
+
for value in self.values:
|
|
1668
|
+
it = value.map_range()
|
|
1669
|
+
ret.values.extend(it.values)
|
|
1670
|
+
return ret
|
|
1671
|
+
|
|
1672
|
+
|
|
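map_range() above simply concatenates the entries of every wrapped map, which is how a merge key that points at a sequence of aliases ('<<: [*a, *b]') can be iterated as one flat map. A hedged sketch, assuming m1 and m2 are parsed MappingYamlNode instances:

merged = sequence_merge_value(m1, m2)   # m1, m2 are assumed parser output
it = merged.map_range()
print(len(it.values))                   # entries of m1 followed by entries of m2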
1673
|
+
##
|
|
1674
|
+
|
|
1675
|
+
|
|
1676
|
+
# AnchorNode type of anchor node
|
|
1677
|
+
@dc.dataclass()
|
|
1678
|
+
class AnchorYamlNode(ScalarYamlNode, BaseYamlNode):
|
|
1679
|
+
start: YamlToken = dc.field(default_factory=dataclass_field_required('start'))
|
|
1680
|
+
name: ta.Optional[YamlNode] = None
|
|
1681
|
+
value: ta.Optional[YamlNode] = None
|
|
1682
|
+
|
|
1683
|
+
def string_without_comment(self) -> str:
|
|
1684
|
+
return check.not_none(self.value).string()
|
|
1685
|
+
|
|
1686
|
+
def set_name(self, name: str) -> ta.Optional[YamlError]:
|
|
1687
|
+
if self.name is None:
|
|
1688
|
+
return ERR_INVALID_ANCHOR_NAME
|
|
1689
|
+
s = self.name
|
|
1690
|
+
if not isinstance(s, StringYamlNode):
|
|
1691
|
+
return ERR_INVALID_ANCHOR_NAME
|
|
1692
|
+
s.value = name
|
|
1693
|
+
return None
|
|
1694
|
+
|
|
1695
|
+
# read implements(io.Reader).Read
|
|
1696
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1697
|
+
return read_node(p, self)
|
|
1698
|
+
|
|
1699
|
+
# type returns AnchorType
|
|
1700
|
+
def type(self) -> YamlNodeType:
|
|
1701
|
+
return YamlNodeType.ANCHOR
|
|
1702
|
+
|
|
1703
|
+
# get_token returns token instance
|
|
1704
|
+
def get_token(self) -> YamlToken:
|
|
1705
|
+
return self.start
|
|
1706
|
+
|
|
1707
|
+
def get_value(self) -> ta.Any:
|
|
1708
|
+
return check.not_none(check.not_none(self.value).get_token()).value
|
|
1709
|
+
|
|
1710
|
+
# add_column add column number to child nodes recursively
|
|
1711
|
+
def add_column(self, col: int) -> None:
|
|
1712
|
+
YamlToken.add_column(self.start, col)
|
|
1713
|
+
if self.name is not None:
|
|
1714
|
+
self.name.add_column(col)
|
|
1715
|
+
if self.value is not None:
|
|
1716
|
+
self.value.add_column(col)
|
|
1717
|
+
|
|
1718
|
+
# String anchor to text
|
|
1719
|
+
def string(self) -> str:
|
|
1720
|
+
anchor = '&' + check.not_none(self.name).string()
|
|
1721
|
+
value = check.not_none(self.value).string()
|
|
1722
|
+
if isinstance(self.value, SequenceYamlNode) and not self.value.is_flow_style:
|
|
1723
|
+
return f'{anchor}\n{value}'
|
|
1724
|
+
elif isinstance(self.value, MappingYamlNode) and not self.value.is_flow_style:
|
|
1725
|
+
return f'{anchor}\n{value}'
|
|
1726
|
+
if value == '':
|
|
1727
|
+
# implicit null value.
|
|
1728
|
+
return anchor
|
|
1729
|
+
return f'{anchor} {value}'
|
|
1730
|
+
|
|
1731
|
+
# marshal_yaml encodes to a YAML text
|
|
1732
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1733
|
+
return self.string()
|
|
1734
|
+
|
|
1735
|
+
# is_merge_key returns whether it is a MergeKey node.
|
|
1736
|
+
def is_merge_key(self) -> bool:
|
|
1737
|
+
if self.value is None:
|
|
1738
|
+
return False
|
|
1739
|
+
key = self.value
|
|
1740
|
+
if not isinstance(key, MapKeyYamlNode):
|
|
1741
|
+
return False
|
|
1742
|
+
return key.is_merge_key()
|
|
1743
|
+
|
|
1744
|
+
|
|
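A short sketch of the anchor helpers above, assuming `a` is an AnchorYamlNode parsed from '&base {x: 1}': set_name() only succeeds when the name node is a StringYamlNode, and string() prints '&' plus the name followed by the rendered value (on its own line for block collections).

err = a.set_name('defaults')   # a is assumed parser output
assert err is None
print(a.string())              # expected roughly: &defaults {x: 1}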
1745
|
+
##
|
|
1746
|
+
|
|
1747
|
+
|
|
1748
|
+
# AliasNode type of alias node
|
|
1749
|
+
@dc.dataclass()
|
|
1750
|
+
class AliasYamlNode(ScalarYamlNode, BaseYamlNode):
|
|
1751
|
+
start: YamlToken = dc.field(default_factory=dataclass_field_required('start'))
|
|
1752
|
+
value: ta.Optional[YamlNode] = None
|
|
1753
|
+
|
|
1754
|
+
def string_without_comment(self) -> str:
|
|
1755
|
+
return check.not_none(self.value).string()
|
|
1756
|
+
|
|
1757
|
+
def set_name(self, name: str) -> ta.Optional[YamlError]:
|
|
1758
|
+
if self.value is None:
|
|
1759
|
+
return ERR_INVALID_ALIAS_NAME
|
|
1760
|
+
if not isinstance(self.value, StringYamlNode):
|
|
1761
|
+
return ERR_INVALID_ALIAS_NAME
|
|
1762
|
+
self.value.value = name
|
|
1763
|
+
return None
|
|
1764
|
+
|
|
1765
|
+
# read implements(io.Reader).Read
|
|
1766
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1767
|
+
return read_node(p, self)
|
|
1768
|
+
|
|
1769
|
+
# type returns AliasType
|
|
1770
|
+
def type(self) -> YamlNodeType:
|
|
1771
|
+
return YamlNodeType.ALIAS
|
|
1772
|
+
|
|
1773
|
+
# get_token returns token instance
|
|
1774
|
+
def get_token(self) -> YamlToken:
|
|
1775
|
+
return self.start
|
|
1776
|
+
|
|
1777
|
+
def get_value(self) -> ta.Any:
|
|
1778
|
+
return check.not_none(check.not_none(self.value).get_token()).value
|
|
1779
|
+
|
|
1780
|
+
# add_column add column number to child nodes recursively
|
|
1781
|
+
def add_column(self, col: int) -> None:
|
|
1782
|
+
YamlToken.add_column(self.start, col)
|
|
1783
|
+
if self.value is not None:
|
|
1784
|
+
self.value.add_column(col)
|
|
1785
|
+
|
|
1786
|
+
# String alias to text
|
|
1787
|
+
def string(self) -> str:
|
|
1788
|
+
return f'*{check.not_none(self.value).string()}'
|
|
1789
|
+
|
|
1790
|
+
# marshal_yaml encodes to a YAML text
|
|
1791
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1792
|
+
return self.string()
|
|
1793
|
+
|
|
1794
|
+
# is_merge_key returns whether it is a MergeKey node.
|
|
1795
|
+
def is_merge_key(self) -> bool:
|
|
1796
|
+
return False
|
|
1797
|
+
|
|
1798
|
+
|
|
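An alias renders as '*' plus its referent name, and unlike an anchor it never acts as a merge key by itself. A hedged sketch, assuming `al` is an AliasYamlNode parsed from '*base':

print(al.string())         # expected: *base
print(al.is_merge_key())   # False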
1799
|
+
##
|
|
1800
|
+
|
|
1801
|
+
|
|
1802
|
+
# DirectiveNode type of directive node
|
|
1803
|
+
@dc.dataclass()
|
|
1804
|
+
class DirectiveYamlNode(BaseYamlNode):
|
|
1805
|
+
# Start is '%' token.
|
|
1806
|
+
start: YamlToken = dc.field(default_factory=dataclass_field_required('start'))
|
|
1807
|
+
# Name is directive name e.g.) "YAML" or "TAG".
|
|
1808
|
+
name: ta.Optional[YamlNode] = None
|
|
1809
|
+
# Values is directive values e.g.) "1.2" or "!!" and "tag:clarkevans.com,2002:app/".
|
|
1810
|
+
values: ta.List[YamlNode] = dc.field(default_factory=list)
|
|
1811
|
+
|
|
1812
|
+
# read implements(io.Reader).Read
|
|
1813
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1814
|
+
return read_node(p, self)
|
|
1815
|
+
|
|
1816
|
+
# type returns DirectiveType
|
|
1817
|
+
def type(self) -> YamlNodeType:
|
|
1818
|
+
return YamlNodeType.DIRECTIVE
|
|
1819
|
+
|
|
1820
|
+
# get_token returns token instance
|
|
1821
|
+
def get_token(self) -> YamlToken:
|
|
1822
|
+
return self.start
|
|
1823
|
+
|
|
1824
|
+
# add_column add column number to child nodes recursively
|
|
1825
|
+
def add_column(self, col: int) -> None:
|
|
1826
|
+
if self.name is not None:
|
|
1827
|
+
self.name.add_column(col)
|
|
1828
|
+
for value in self.values:
|
|
1829
|
+
value.add_column(col)
|
|
1830
|
+
|
|
1831
|
+
# String directive to text
|
|
1832
|
+
def string(self) -> str:
|
|
1833
|
+
values: ta.List[str] = []
|
|
1834
|
+
for val in self.values:
|
|
1835
|
+
values.append(val.string())
|
|
1836
|
+
return ' '.join(['%' + check.not_none(self.name).string(), *values])
|
|
1837
|
+
|
|
1838
|
+
# marshal_yaml encodes to a YAML text
|
|
1839
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1840
|
+
return self.string()
|
|
1841
|
+
|
|
1842
|
+
|
|
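Per string() above, a directive is rendered by joining '%' plus its name with the value nodes using single spaces. A hedged sketch, assuming `d` is the DirectiveYamlNode parsed from a '%YAML 1.2' header line:

print(d.string())   # expected: %YAML 1.2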
1843
|
+
##
|
|
1844
|
+
|
|
1845
|
+
|
|
1846
|
+
# TagNode type of tag node
|
|
1847
|
+
@dc.dataclass()
|
|
1848
|
+
class TagYamlNode(ScalarYamlNode, BaseYamlNode, ArrayYamlNode):
|
|
1849
|
+
directive: ta.Optional[DirectiveYamlNode] = None
|
|
1850
|
+
start: YamlToken = dc.field(default_factory=dataclass_field_required('start'))
|
|
1851
|
+
value: ta.Optional[YamlNode] = None
|
|
1852
|
+
|
|
1853
|
+
def get_value(self) -> ta.Any:
|
|
1854
|
+
if not isinstance(self.value, ScalarYamlNode):
|
|
1855
|
+
return None
|
|
1856
|
+
return self.value.get_value()
|
|
1857
|
+
|
|
1858
|
+
def string_without_comment(self) -> str:
|
|
1859
|
+
return check.not_none(self.value).string()
|
|
1860
|
+
|
|
1861
|
+
# read implements(io.Reader).Read
|
|
1862
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1863
|
+
return read_node(p, self)
|
|
1864
|
+
|
|
1865
|
+
# type returns TagType
|
|
1866
|
+
def type(self) -> YamlNodeType:
|
|
1867
|
+
return YamlNodeType.TAG
|
|
1868
|
+
|
|
1869
|
+
# get_token returns token instance
|
|
1870
|
+
def get_token(self) -> YamlToken:
|
|
1871
|
+
return self.start
|
|
1872
|
+
|
|
1873
|
+
# add_column add column number to child nodes recursively
|
|
1874
|
+
def add_column(self, col: int) -> None:
|
|
1875
|
+
YamlToken.add_column(self.start, col)
|
|
1876
|
+
if self.value is not None:
|
|
1877
|
+
self.value.add_column(col)
|
|
1878
|
+
|
|
1879
|
+
# String tag to text
|
|
1880
|
+
def string(self) -> str:
|
|
1881
|
+
value = check.not_none(self.value).string()
|
|
1882
|
+
if isinstance(self.value, SequenceYamlNode) and not self.value.is_flow_style:
|
|
1883
|
+
return f'{self.start.value}\n{value}'
|
|
1884
|
+
elif isinstance(self.value, MappingYamlNode) and not self.value.is_flow_style:
|
|
1885
|
+
return f'{self.start.value}\n{value}'
|
|
1886
|
+
|
|
1887
|
+
return f'{self.start.value} {value}'
|
|
1888
|
+
|
|
1889
|
+
# marshal_yaml encodes to a YAML text
|
|
1890
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1891
|
+
return self.string()
|
|
1892
|
+
|
|
1893
|
+
# is_merge_key returns whether it is a MergeKey node.
|
|
1894
|
+
def is_merge_key(self) -> bool:
|
|
1895
|
+
if self.value is None:
|
|
1896
|
+
return False
|
|
1897
|
+
key = self.value
|
|
1898
|
+
if not isinstance(key, MapKeyYamlNode):
|
|
1899
|
+
return False
|
|
1900
|
+
return key.is_merge_key()
|
|
1901
|
+
|
|
1902
|
+
def array_range(self) -> ta.Optional[ArrayYamlNodeIter]:
|
|
1903
|
+
arr = self.value
|
|
1904
|
+
if not isinstance(arr, ArrayYamlNode):
|
|
1905
|
+
return None
|
|
1906
|
+
return arr.array_range()
|
|
1907
|
+
|
|
1908
|
+
|
|
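A tag node keeps the raw tag text (e.g. '!!str') in start.value and prints it before its value: on the same line for scalars and flow collections, or on its own line above a block collection. A hedged sketch, assuming `t` is a TagYamlNode parsed from '!!str 123':

print(t.string())      # expected: !!str 123
print(t.get_value())   # the scalar value carried by the tagged node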
1909
|
+
##
|
|
1910
|
+
|
|
1911
|
+
|
|
1912
|
+
# CommentNode type of comment node
|
|
1913
|
+
@dc.dataclass()
|
|
1914
|
+
class CommentYamlNode(BaseYamlNode):
|
|
1915
|
+
token: ta.Optional[YamlToken] = dc.field(default_factory=dataclass_field_required('token'))
|
|
1916
|
+
|
|
1917
|
+
# read implements(io.Reader).Read
|
|
1918
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1919
|
+
return read_node(p, self)
|
|
1920
|
+
|
|
1921
|
+
# type returns CommentType
|
|
1922
|
+
def type(self) -> YamlNodeType:
|
|
1923
|
+
return YamlNodeType.COMMENT
|
|
1924
|
+
|
|
1925
|
+
# get_token returns token instance
|
|
1926
|
+
def get_token(self) -> ta.Optional[YamlToken]:
|
|
1927
|
+
return self.token
|
|
1928
|
+
|
|
1929
|
+
# add_column add column number to child nodes recursively
|
|
1930
|
+
def add_column(self, col: int) -> None:
|
|
1931
|
+
YamlToken.add_column(self.token, col)
|
|
1932
|
+
|
|
1933
|
+
# String comment to text
|
|
1934
|
+
def string(self) -> str:
|
|
1935
|
+
return f'#{check.not_none(self.token).value}'
|
|
1936
|
+
|
|
1937
|
+
# marshal_yaml encodes to a YAML text
|
|
1938
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1939
|
+
return self.string()
|
|
1940
|
+
|
|
1941
|
+
|
|
1942
|
+
##
|
|
1943
|
+
|
|
1944
|
+
|
|
1945
|
+
# CommentGroupNode type of comment node
|
|
1946
|
+
@dc.dataclass()
|
|
1947
|
+
class CommentGroupYamlNode(BaseYamlNode):
|
|
1948
|
+
comments: ta.List[CommentYamlNode] = dc.field(default_factory=dataclass_field_required('comments'))
|
|
1949
|
+
|
|
1950
|
+
# read implements(io.Reader).Read
|
|
1951
|
+
def read(self, p: str) -> YamlErrorOr[int]:
|
|
1952
|
+
return read_node(p, self)
|
|
1953
|
+
|
|
1954
|
+
# type returns CommentType
|
|
1955
|
+
def type(self) -> YamlNodeType:
|
|
1956
|
+
return YamlNodeType.COMMENT
|
|
1957
|
+
|
|
1958
|
+
# get_token returns token instance
|
|
1959
|
+
def get_token(self) -> ta.Optional[YamlToken]:
|
|
1960
|
+
if len(self.comments) > 0:
|
|
1961
|
+
return self.comments[0].token
|
|
1962
|
+
return None
|
|
1963
|
+
|
|
1964
|
+
# add_column add column number to child nodes recursively
|
|
1965
|
+
def add_column(self, col: int) -> None:
|
|
1966
|
+
for comment in self.comments:
|
|
1967
|
+
comment.add_column(col)
|
|
1968
|
+
|
|
1969
|
+
# String comment to text
|
|
1970
|
+
def string(self) -> str:
|
|
1971
|
+
values: ta.List[str] = []
|
|
1972
|
+
for comment in self.comments:
|
|
1973
|
+
values.append(comment.string())
|
|
1974
|
+
return '\n'.join(values)
|
|
1975
|
+
|
|
1976
|
+
def string_with_space(self, col: int) -> str:
|
|
1977
|
+
values: ta.List[str] = []
|
|
1978
|
+
space = ' ' * col
|
|
1979
|
+
for comment in self.comments:
|
|
1980
|
+
spc = space
|
|
1981
|
+
if check_line_break(check.not_none(comment.token)):
|
|
1982
|
+
spc = f'\n{spc}'
|
|
1983
|
+
values.append(spc + comment.string())
|
|
1984
|
+
return '\n'.join(values)
|
|
1985
|
+
|
|
1986
|
+
# marshal_yaml encodes to a YAML text
|
|
1987
|
+
def marshal_yaml(self) -> YamlErrorOr[str]:
|
|
1988
|
+
return self.string()
|
|
1989
|
+
|
|
1990
|
+
|
|
1991
|
+
##
|
|
1992
|
+
|
|
1993
|
+
|
|
1994
|
+
# Visitor has a visit method that is invoked for each node encountered by walk.
|
|
1995
|
+
# If the visitor w returned by visit is not None, walk visits each of the children of node with the visitor w,
|
|
1996
|
+
# followed by a call of w.visit(None).
|
|
1997
|
+
class Visitor(Abstract):
|
|
1998
|
+
@abc.abstractmethod
|
|
1999
|
+
def visit(self, node: YamlNode) -> ta.Optional['Visitor']:
|
|
2000
|
+
raise NotImplementedError
|
|
2001
|
+
|
|
2002
|
+
|
|
2003
|
+
# walk traverses an AST in depth-first order: it starts by calling v.visit(node); node must not be None.
|
|
2004
|
+
# If the visitor w returned by v.visit(node) is not None,
|
|
2005
|
+
# walk is invoked recursively with visitor w for each of the non-None children of node,
|
|
2006
|
+
# followed by a call of w.visit(None).
|
|
2007
|
+
def walk(v: Visitor, node: YamlNode) -> None:
|
|
2008
|
+
if (v_ := v.visit(node)) is None:
|
|
2009
|
+
return
|
|
2010
|
+
v = v_
|
|
2011
|
+
|
|
2012
|
+
n = node
|
|
2013
|
+
if isinstance(n, (CommentYamlNode, NullYamlNode)):
|
|
2014
|
+
walk_comment(v, n)
|
|
2015
|
+
if isinstance(n, IntegerYamlNode):
|
|
2016
|
+
walk_comment(v, n)
|
|
2017
|
+
if isinstance(n, FloatYamlNode):
|
|
2018
|
+
walk_comment(v, n)
|
|
2019
|
+
if isinstance(n, StringYamlNode):
|
|
2020
|
+
walk_comment(v, n)
|
|
2021
|
+
if isinstance(n, MergeKeyYamlNode):
|
|
2022
|
+
walk_comment(v, n)
|
|
2023
|
+
if isinstance(n, BoolYamlNode):
|
|
2024
|
+
walk_comment(v, n)
|
|
2025
|
+
if isinstance(n, InfinityYamlNode):
|
|
2026
|
+
walk_comment(v, n)
|
|
2027
|
+
if isinstance(n, NanYamlNode):
|
|
2028
|
+
walk_comment(v, n)
|
|
2029
|
+
if isinstance(n, LiteralYamlNode):
|
|
2030
|
+
walk_comment(v, n)
|
|
2031
|
+
walk(v, check.not_none(n.value))
|
|
2032
|
+
if isinstance(n, DirectiveYamlNode):
|
|
2033
|
+
walk_comment(v, n)
|
|
2034
|
+
walk(v, check.not_none(n.name))
|
|
2035
|
+
for value0 in n.values:
|
|
2036
|
+
walk(v, value0)
|
|
2037
|
+
if isinstance(n, TagYamlNode):
|
|
2038
|
+
walk_comment(v, n)
|
|
2039
|
+
walk(v, check.not_none(n.value))
|
|
2040
|
+
if isinstance(n, DocumentYamlNode):
|
|
2041
|
+
walk_comment(v, n)
|
|
2042
|
+
walk(v, check.not_none(n.body))
|
|
2043
|
+
if isinstance(n, MappingYamlNode):
|
|
2044
|
+
walk_comment(v, n)
|
|
2045
|
+
for value1 in n.values:
|
|
2046
|
+
walk(v, value1)
|
|
2047
|
+
if isinstance(n, MappingKeyYamlNode):
|
|
2048
|
+
walk_comment(v, n)
|
|
2049
|
+
walk(v, check.not_none(n.value))
|
|
2050
|
+
if isinstance(n, MappingValueYamlNode):
|
|
2051
|
+
walk_comment(v, n)
|
|
2052
|
+
walk(v, n.key)
|
|
2053
|
+
walk(v, n.value)
|
|
2054
|
+
if isinstance(n, SequenceYamlNode):
|
|
2055
|
+
walk_comment(v, n)
|
|
2056
|
+
for value2 in n.values:
|
|
2057
|
+
walk(v, check.not_none(value2))
|
|
2058
|
+
if isinstance(n, AnchorYamlNode):
|
|
2059
|
+
walk_comment(v, n)
|
|
2060
|
+
walk(v, check.not_none(n.name))
|
|
2061
|
+
walk(v, check.not_none(n.value))
|
|
2062
|
+
if isinstance(n, AliasYamlNode):
|
|
2063
|
+
walk_comment(v, n)
|
|
2064
|
+
walk(v, check.not_none(n.value))
|
|
2065
|
+
|
|
2066
|
+
|
|
2067
|
+
def walk_comment(v: Visitor, base: ta.Optional[BaseYamlNode]) -> None:
|
|
2068
|
+
if base is None:
|
|
2069
|
+
return
|
|
2070
|
+
if base.comment is None:
|
|
2071
|
+
return
|
|
2072
|
+
walk(v, base.comment)
|
|
2073
|
+
|
|
2074
|
+
|
|
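A minimal Visitor following the contract documented above: visit() returns a visitor to keep descending into the node's children, or None to prune that subtree. `root` is assumed to be any node produced by this package's parser; the FilterWalker defined just below applies the same pattern to a single node type.

class AnchorCollector(Visitor):
    def __init__(self) -> None:
        self.found: ta.List[AnchorYamlNode] = []

    def visit(self, node: YamlNode) -> ta.Optional[Visitor]:
        if isinstance(node, AnchorYamlNode):
            self.found.append(node)
        return self   # keep walking into children

collector = AnchorCollector()
walk(collector, root)   # root is assumed parser output
print([a.string() for a in collector.found])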
2075
|
+
#
|
|
2076
|
+
|
|
2077
|
+
|
|
2078
|
+
@dc.dataclass()
|
|
2079
|
+
class FilterWalker(Visitor):
|
|
2080
|
+
typ: YamlNodeType = dc.field(default_factory=dataclass_field_required('typ'))
|
|
2081
|
+
results: ta.List[YamlNode] = dc.field(default_factory=list)
|
|
2082
|
+
|
|
2083
|
+
def visit(self, n: YamlNode) -> Visitor:
|
|
2084
|
+
if self.typ == n.type():
|
|
2085
|
+
self.results.append(n)
|
|
2086
|
+
return self
|
|
2087
|
+
|
|
2088
|
+
|
|
2089
|
+
#
|
|
2090
|
+
|
|
2091
|
+
|
|
2092
|
+
@dc.dataclass()
|
|
2093
|
+
class ParentFinder:
|
|
2094
|
+
target: YamlNode
|
|
2095
|
+
|
|
2096
|
+
def walk(self, parent: YamlNode, node: ta.Optional[YamlNode]) -> ta.Optional[YamlNode]:
|
|
2097
|
+
if self.target == node:
|
|
2098
|
+
return parent
|
|
2099
|
+
|
|
2100
|
+
n = node
|
|
2101
|
+
if isinstance(n, CommentYamlNode):
|
|
2102
|
+
return None
|
|
2103
|
+
if isinstance(n, NullYamlNode):
|
|
2104
|
+
return None
|
|
2105
|
+
if isinstance(n, IntegerYamlNode):
|
|
2106
|
+
return None
|
|
2107
|
+
if isinstance(n, FloatYamlNode):
|
|
2108
|
+
return None
|
|
2109
|
+
if isinstance(n, StringYamlNode):
|
|
2110
|
+
return None
|
|
2111
|
+
if isinstance(n, MergeKeyYamlNode):
|
|
2112
|
+
return None
|
|
2113
|
+
if isinstance(n, BoolYamlNode):
|
|
2114
|
+
return None
|
|
2115
|
+
if isinstance(n, InfinityYamlNode):
|
|
2116
|
+
return None
|
|
2117
|
+
if isinstance(n, NanYamlNode):
|
|
2118
|
+
return None
|
|
2119
|
+
if isinstance(n, LiteralYamlNode):
|
|
2120
|
+
return self.walk(n, n.value)
|
|
2121
|
+
if isinstance(n, DirectiveYamlNode):
|
|
2122
|
+
if (found := self.walk(n, n.name)) is not None:
|
|
2123
|
+
return found
|
|
2124
|
+
for value0 in n.values:
|
|
2125
|
+
if (found := self.walk(n, value0)) is not None:
|
|
2126
|
+
return found
|
|
2127
|
+
if isinstance(n, TagYamlNode):
|
|
2128
|
+
return self.walk(n, n.value)
|
|
2129
|
+
if isinstance(n, DocumentYamlNode):
|
|
2130
|
+
return self.walk(n, n.body)
|
|
2131
|
+
if isinstance(n, MappingYamlNode):
|
|
2132
|
+
for value1 in n.values:
|
|
2133
|
+
if (found := self.walk(n, value1)) is not None:
|
|
2134
|
+
return found
|
|
2135
|
+
if isinstance(n, MappingKeyYamlNode):
|
|
2136
|
+
return self.walk(n, n.value)
|
|
2137
|
+
if isinstance(n, MappingValueYamlNode):
|
|
2138
|
+
if (found := self.walk(n, n.key)) is not None:
|
|
2139
|
+
return found
|
|
2140
|
+
return self.walk(n, n.value)
|
|
2141
|
+
if isinstance(n, SequenceYamlNode):
|
|
2142
|
+
for value2 in n.values:
|
|
2143
|
+
if (found := self.walk(n, value2)) is not None:
|
|
2144
|
+
return found
|
|
2145
|
+
if isinstance(n, AnchorYamlNode):
|
|
2146
|
+
if (found := self.walk(n, n.name)) is not None:
|
|
2147
|
+
return found
|
|
2148
|
+
return self.walk(n, n.value)
|
|
2149
|
+
if isinstance(n, AliasYamlNode):
|
|
2150
|
+
return self.walk(n, n.value)
|
|
2151
|
+
return None
|
|
2152
|
+
|
|
2153
|
+
|
|
2154
|
+
# Parent gets the parent node of the given child node.
|
|
2155
|
+
def parent(root: YamlNode, child: YamlNode) -> ta.Optional[YamlNode]:
|
|
2156
|
+
finder = ParentFinder(target=child)
|
|
2157
|
+
return finder.walk(root, root)
|
|
2158
|
+
|
|
2159
|
+
|
|
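A hedged sketch of the parent lookup above: ParentFinder re-walks the tree from `root`, comparing each child against the target, so parent() returns the immediate enclosing node or None when `child` is not reachable from `root` (both are assumed to come from the parser).

p = parent(root, child)   # root and child are assumed parser output
print(p.type() if p is not None else 'not found')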
2160
|
+
#
|
|
2161
|
+
|
|
2162
|
+
|
|
2163
|
+
# Filter returns a list of nodes that match the given type.
|
|
2164
|
+
def filter_(typ: YamlNodeType, node: YamlNode) -> ta.List[YamlNode]:
|
|
2165
|
+
walker = FilterWalker(typ=typ)
|
|
2166
|
+
walk(walker, node)
|
|
2167
|
+
return walker.results
|
|
2168
|
+
|
|
2169
|
+
|
|
2170
|
+
# FilterFile returns a list of nodes that match the given type.
|
|
2171
|
+
def filter_file(typ: YamlNodeType, file: YamlFile) -> ta.List[YamlNode]:
|
|
2172
|
+
results: ta.List[YamlNode] = []
|
|
2173
|
+
for doc in file.docs:
|
|
2174
|
+
walker = FilterWalker(typ=typ)
|
|
2175
|
+
walk(walker, doc)
|
|
2176
|
+
results.extend(walker.results)
|
|
2177
|
+
return results
|
|
2178
|
+
|
|
2179
|
+
|
|
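A hedged usage sketch for the two filters above; `doc` is assumed to be a single document node and `file` a YamlFile (with a docs list) produced elsewhere in this package:

anchors = filter_(YamlNodeType.ANCHOR, doc)        # nodes of one type within a document
aliases = filter_file(YamlNodeType.ALIAS, file)    # same, across every document in a file
print(len(anchors), len(aliases))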
2180
|
+
#
|
|
2181
|
+
|
|
2182
|
+
|
|
2183
|
+
@dc.dataclass()
|
|
2184
|
+
class InvalidMergeTypeYamlError(YamlError):
|
|
2185
|
+
dst: YamlNode
|
|
2186
|
+
src: YamlNode
|
|
2187
|
+
|
|
2188
|
+
@property
|
|
2189
|
+
def message(self) -> str:
|
|
2190
|
+
return f'cannot merge {self.src.type()} into {self.dst.type()}'
|
|
2191
|
+
|
|
2192
|
+
|
|
2193
|
+
# Merge merges document, mapping, and sequence nodes.
|
|
2194
|
+
def merge(dst: YamlNode, src: YamlNode) -> ta.Optional[YamlError]:
|
|
2195
|
+
if isinstance(src, DocumentYamlNode):
|
|
2196
|
+
doc: DocumentYamlNode = src
|
|
2197
|
+
src = check.not_none(doc.body)
|
|
2198
|
+
|
|
2199
|
+
err = InvalidMergeTypeYamlError(dst=dst, src=src)
|
|
2200
|
+
if dst.type() == YamlNodeType.DOCUMENT:
|
|
2201
|
+
node0: DocumentYamlNode = check.isinstance(dst, DocumentYamlNode)
|
|
2202
|
+
return merge(check.not_none(node0.body), src)
|
|
2203
|
+
if dst.type() == YamlNodeType.MAPPING:
|
|
2204
|
+
node1: MappingYamlNode = check.isinstance(dst, MappingYamlNode)
|
|
2205
|
+
if not isinstance(src, MappingYamlNode):
|
|
2206
|
+
return err
|
|
2207
|
+
target0: MappingYamlNode = src
|
|
2208
|
+
node1.merge(target0)
|
|
2209
|
+
return None
|
|
2210
|
+
if dst.type() == YamlNodeType.SEQUENCE:
|
|
2211
|
+
node2: SequenceYamlNode = check.isinstance(dst, SequenceYamlNode)
|
|
2212
|
+
if not isinstance(src, SequenceYamlNode):
|
|
2213
|
+
return err
|
|
2214
|
+
target1: SequenceYamlNode = src
|
|
2215
|
+
node2.merge(target1)
|
|
2216
|
+
return None
|
|
2217
|
+
return err
|
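A hedged example of the merge() helper above, merging one parsed node into another of a compatible kind. `dst` and `src` are assumed to be DocumentYamlNode or MappingYamlNode instances from the parser; an incompatible pair yields the InvalidMergeTypeYamlError defined above.

err = merge(dst, src)        # dst and src are assumed parser output
if err is not None:
    print(err.message)       # message property defined on InvalidMergeTypeYamlError above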