omlish 0.0.0.dev306__py3-none-any.whl → 0.0.0.dev308__py3-none-any.whl

This diff compares the contents of two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
@@ -1,28 +1,88 @@
+ # ruff: noqa: PYI055 UP007
  """
  TODO:
+  - \u0123 in strings
+  - https://clojure.org/reference/reader
   - reader meta - ^:foo
+  - read table
  """
- # https://github.com/jorinvo/edn-data/blob/1e5824f63803eb58f35e98839352000053d47115/test/parse.test.ts
+ import dataclasses as dc
  import datetime
  import enum
+ import io
  import re
  import typing as ta

  from ... import check
+ from ...funcs.genmachine import GenMachine
+ from .lexing import Position
+ from .lexing import StreamLexer
+ from .lexing import Token
  from .values import Char
+ from .values import Collection
  from .values import Keyword
  from .values import List
  from .values import Map
  from .values import Set
  from .values import Symbol
- from .values import TaggedVal
+ from .values import Tagged
  from .values import Vector


  ##


- class ListParser:
+ WORD_CONST_VALUES: ta.Mapping[str, ta.Any] = {
+     'true': True,
+     'false': False,
+     'nil': None,
+
+     '##Inf': float('inf'),
+     '##-Inf': float('-inf'),
+     '##NaN': float('nan'),
+ }
+
+
+ STRING_ESCAPE_MAP: ta.Mapping[str, str] = {
+     '"': '"',
+     '\\': '\\',
+     'b': '\b',
+     'f': '\f',
+     'n': '\n',
+     'r': '\r',
+     't': '\t',
+ }
+
+
+ CHAR_ESCAPE_MAP: ta.Mapping[str, str] = {
+     'backspace': '\b',
+     'formfeed': '\f',
+     'newline': '\n',
+     'return': '\r',
+     'space': ' ',
+     'tab': '\t',
+ }
+
+
+ ##
+
+
+ @dc.dataclass()
+ class StreamParseError(Exception):
+     message: str
+
+     pos: Position | None = None
+
+
+ @dc.dataclass(frozen=True)
+ class MetaMaker:
+     fn: ta.Callable[..., ta.Any]
+
+     def __call__(self, *args: ta.Any, meta: ta.Any | None = None) -> ta.Any:
+         return self.fn(*args, meta=meta)
+
+
+ class StreamParser(GenMachine[Token, ta.Any]):
      DEFAULT_TAG_HANDLERS: ta.ClassVar[ta.Mapping[str, ta.Callable[..., ta.Any]]] = {
          'inst': lambda val: datetime.datetime.fromisoformat(val) if isinstance(val, str) else None,
      }
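
This first hunk replaces the character-driven `ListParser` scaffolding with token-level machinery: module-level tables for word constants, string escapes and character names, a `StreamParseError`, and the `MetaMaker` wrapper, which forwards its positional arguments plus a keyword-only `meta` argument to the wrapped constructor. A minimal illustration of that forwarding, assuming the `omlish.formats.edn` module paths implied by the relative imports and the kw-only `meta` field added to `Value` in the values.py hunks further below:

    # Sketch only: MetaMaker(fn)(*args, meta=...) simply calls fn(*args, meta=...).
    # Module paths are assumed from the relative imports above.
    from omlish.formats.edn.parsing import MetaMaker
    from omlish.formats.edn.values import Keyword

    keyword_maker = MetaMaker(Keyword)
    k = keyword_maker('doc', meta={'line': 1})
    # ...equivalent to Keyword('doc', meta={'line': 1}); meta defaults to None.
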
@@ -30,19 +90,17 @@ class ListParser:
      def __init__(
          self,
          *,
-         keyword_maker: ta.Callable[..., ta.Any] = Keyword,
-         char_maker: ta.Callable[..., ta.Any] = Char,
-         symbol_maker: ta.Callable[..., ta.Any] = Symbol,
+         keyword_maker: ta.Callable[..., ta.Any] = MetaMaker(Keyword),
+         char_maker: ta.Callable[..., ta.Any] = MetaMaker(Char),
+         symbol_maker: ta.Callable[..., ta.Any] = MetaMaker(Symbol),

-         list_maker: ta.Callable[..., ta.Any] = List.new,
-         vector_maker: ta.Callable[..., ta.Any] = Vector.new,
-         set_maker: ta.Callable[..., ta.Any] = Set.new,
-         map_maker: ta.Callable[..., ta.Any] = Map.new,
+         list_maker: ta.Callable[..., ta.Any] = MetaMaker(List.new),
+         vector_maker: ta.Callable[..., ta.Any] = MetaMaker(Vector.new),
+         set_maker: ta.Callable[..., ta.Any] = MetaMaker(Set.new),
+         map_maker: ta.Callable[..., ta.Any] = MetaMaker(Map.new),

          tag_handlers: ta.Mapping[str, ta.Callable[..., ta.Any]] | None = None,
      ) -> None:
-         super().__init__()
-
          self._keyword_maker = keyword_maker
          self._char_maker = char_maker
          self._symbol_maker = symbol_maker
@@ -57,299 +115,262 @@ class ListParser:
              **(tag_handlers or {}),
          }

-         self._stack: list[tuple[ListParser._ParseMode | ListParser._StackItem, ta.Any]] = []
-         self._mode: ListParser._ParseMode = ListParser._ParseMode.IDLE
-         self._state = ''
-         self._result: ta.Any = self._UNDEFINED
+         self._stack: list[
+             tuple[
+                 ta.Union[
+                     type[Collection],
+                     StreamParser._StackSpecial,
+                 ],
+                 list[ta.Any],
+             ],
+         ] = []

-     #
+         super().__init__(self._do_main())

-     class _UNDEFINED:  # noqa
-         def __new__(cls, *args, **kwargs):  # noqa
-             raise TypeError
+     class _StackSpecial(enum.Enum):  # noqa
+         DISCARD = enum.auto()
+         TAG = enum.auto()

-     class _ParseMode(enum.Enum):
-         IDLE = 0
-         STRING = 1
-         ESCAPE = 2
-         COMMENT = 3
+     def _emit_value(self, value: ta.Any) -> tuple[ta.Any, ...]:
+         while self._stack and self._stack[-1][0] is StreamParser._StackSpecial.TAG:
+             cc, cl = self._stack.pop()
+             ts = check.non_empty_str(check.single(cl))
+             value = Tagged(ts, value)

-     class _StackItem(enum.Enum):
-         VECTOR = 0
-         LIST = 1
-         MAP = 2
-         SET = 3
-         TAG = 4
+         if not self._stack:
+             return (value,)

-     #
+         cc, cl = self._stack[-1]
+
+         if cc is StreamParser._StackSpecial.DISCARD:
+             check.empty(cl)
+             self._stack.pop()
+             return ()

-     def _update_stack(self) -> None:
-         if not self._stack or self._result is self._UNDEFINED:
-             return
+         elif cc is StreamParser._StackSpecial.TAG:
+             ts = check.non_empty_str(check.single(cl))
+             self._stack.pop()
+             tv = Tagged(ts, value)
+             return (tv,)
+
+         elif cc is Map:
+             if cl and len(cl[-1]) < 2:
+                 cl[-1] = (*cl[-1], value)
+             else:
+                 cl.append((value,))
+             return ()
+
+         elif isinstance(cc, type) and issubclass(cc, Collection):
+             cl.append(value)
+             return ()
+
+         else:
+             raise RuntimeError(cc)
+
+     def _do_main(self):
+         while True:
+             tok: Token
+             try:
+                 tok = yield None  # noqa
+             except GeneratorExit:
+                 if self._stack:
+                     raise StreamParseError('Expected value') from None
+                 else:
+                     raise

-         stack_item, prev_state = self._stack[-1]
+             # ignored

-         if stack_item == ListParser._StackItem.VECTOR:
-             prev_state.append(self._result)
+             if tok.kind in ('SPACE', 'COMMENT'):
+                 continue

-         elif stack_item == ListParser._StackItem.LIST:
-             prev_state.append(self._result)
+             # scalars

-         elif stack_item == ListParser._StackItem.SET:
-             prev_state.append(self._result)
+             value: ta.Any

-         elif stack_item == ListParser._StackItem.MAP:
-             if len(prev_state[1]) > 0:
-                 prev_state[0].append([prev_state[1].pop(), self._result])
-             else:
-                 prev_state[1].append(self._result)
+             if tok.kind == 'STRING':
+                 value = self._parse_string(tok)

-         elif stack_item == ListParser._StackItem.TAG:
-             self._stack.pop()
+             elif tok.kind == 'CHAR':
+                 value = self._parse_char(tok)

-             if prev_state == '_':
-                 self._result = self._UNDEFINED
+             elif tok.kind == 'WORD':
+                 if tok.src.startswith('#'):
+                     # FIXME: more dispatching
+                     self._stack.append((StreamParser._StackSpecial.TAG, [tok.src[1:]]))
+                     continue

-             else:
-                 tag_handler = self._tag_handlers.get(prev_state)
-                 if tag_handler:
-                     self._result = tag_handler(self._result)
                  else:
-                     self._result = TaggedVal(prev_state, self._result)
-
-             self._update_stack()
-             return
-
-         # TODO: else error
-         # Reset result after updating stack
-         self._result = self._UNDEFINED
-
-     #
-
-     _INT_PAT = re.compile(r'^[-+]?(0|[1-9][0-9]*)$')
-     _BIGINT_PAT = re.compile(r'^[-+]?(0|[1-9][0-9]*)N$')
-     _FLOAT_PAT = re.compile(r'^[-+]?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][+-]?(0|[1-9][0-9]*))?M?$')
-
-     def _match(self) -> None:
-         if self._state == 'nil':
-             self._result = None
-
-         elif self._state == 'true':
-             self._result = True
-
-         elif self._state == 'false':
-             self._result = False
-
-         elif self._state.startswith(':'):
-             # Keyword
-             self._result = self._keyword_maker(self._state[1:])
-
-         elif self._state.startswith('#'):
-             # Tag
-             self._stack.append((ListParser._StackItem.TAG, self._state[1:]))
-             self._result = self._UNDEFINED
-
-         elif self._INT_PAT.match(self._state):
-             # Int
-             self._result = int(self._state)
-
-         elif self._FLOAT_PAT.match(self._state):
-             # Float
-             self._result = float(self._state)
-
-         elif self._BIGINT_PAT.match(self._state):
-             # BigInt
-             self._result = int(self._state[:-1])  # In Python we don't need special handling for bigint
-
-         elif self._state.startswith('\\'):
-             # Char
-             check.state(len(self._state) > 1)
-             if self._state == '\\space':
-                 c = ' '
-             elif self._state == '\\newline':
-                 c = '\n'
-             elif self._state == '\\return':
-                 c = '\r'
-             elif self._state == '\\tab':
-                 c = '\t'
-             elif self._state == '\\\\':
-                 c = '\\'
-             elif self._state.startswith('\\u'):
-                 check.state(len(self._state) == 6)
-                 c = chr(int(self._state[2:], 16))
-             else:
-                 check.state(len(self._state) == 2)
-                 c = self._state[1:]
+                     value = self._parse_word(tok)

-             self._result = self._char_maker(c)
+             # open

-         elif self._state:
-             # Symbol
-             self._result = self._symbol_maker(self._state)
+             elif tok.kind == 'LPAREN':
+                 self._stack.append((List, []))
+                 continue

-         self._state = ''
+             elif tok.kind == 'LBRACKET':
+                 self._stack.append((Vector, []))
+                 continue

-     #
+             elif tok.kind == 'HASH_LBRACE':
+                 self._stack.append((Set, []))
+                 continue

-     _SPACE_CHARS: ta.ClassVar[ta.AbstractSet[str]] = frozenset([',', ' ', '\t', '\n', '\r'])
+             elif tok.kind == 'LBRACE':
+                 self._stack.append((Map, []))
+                 continue

-     _STRING_ESCAPE_MAP: ta.ClassVar[ta.Mapping[str, str]] = {
-         't': '\t',
-         'r': '\r',
-         'n': '\n',
-         '\\': '\\',
-         '"': '"',
-     }
+             elif tok.kind == 'HASH_UNDERSCORE':
+                 self._stack.append((StreamParser._StackSpecial.DISCARD, []))
+                 continue

-     def parse(self, src: str) -> list[ta.Any]:
-         values = []
+             # close

-         i = -1
-         for i in range(len(src)):
-             if not self._stack and self._result is not self._UNDEFINED:
-                 values.append(self._result)
-                 self._result = self._UNDEFINED
+             elif tok.kind == 'RPAREN':
+                 cc, cl = self._stack.pop()
+                 check.state(cc is List)
+                 value = self._list_maker(cl)

-             char = src[i]
+             elif tok.kind == 'RBRACKET':
+                 cc, cl = self._stack.pop()
+                 check.state(cc is Vector)
+                 value = self._vector_maker(cl)

-             if self._mode == ListParser._ParseMode.IDLE:
-                 if char == '"':
-                     self._match()
-                     self._update_stack()
-                     self._mode = ListParser._ParseMode.STRING
-                     self._state = ''
-                     continue
+             elif tok.kind == 'RBRACE':
+                 cc, cl = self._stack.pop()

-                 if char == ';':
-                     self._mode = ListParser._ParseMode.COMMENT
-                     continue
+                 if cc is Set:
+                     value = self._set_maker(cl)

-                 if char in self._SPACE_CHARS:
-                     self._match()
-                     self._update_stack()
-                     continue
+                 elif cc is Map:
+                     if cl and len(cl[-1]) != 2:
+                         raise RuntimeError('Mismatched map entries')
+                     value = self._map_maker(cl)

-                 if char == '}':
-                     self._match()
-                     self._update_stack()
+                 else:
+                     raise RuntimeError(cc)

-                     if self._stack:
-                         stack_item, prev_state = self._stack.pop()
+             # nyi

-                         if stack_item == ListParser._StackItem.MAP:
-                             check.empty(prev_state[1])
-                             self._result = self._map_maker(prev_state[0])
+             elif tok.kind == 'META':
+                 raise NotImplementedError

-                         else:  # Set
-                             # FIXME:
-                             # check.state(stack_item == ListParser._StackItem.SET)
-                             self._result = self._set_maker(prev_state)
+             elif tok.kind == 'QUOTE':
+                 raise NotImplementedError

-                         self._update_stack()
-                     continue
+             # failure

-                 if char == ']':
-                     self._match()
-                     self._update_stack()
-                     stack_item, prev_state = self._stack.pop()
-                     self._result = self._vector_maker(tuple(prev_state))
-                     self._update_stack()
-                     continue
+             else:
+                 raise ValueError(tok.kind)
+
+             # emit
+
+             if (ev := self._emit_value(value)):
+                 yield ev
+
+     def _parse_string(self, tok: Token) -> str:
+         check.state(tok.kind == 'STRING')
+         src = tok.src
+         check.state(src[0] == '"')
+         check.state(src[-1] == '"')
+         check.state(len(src) > 1)
+
+         p = 1
+         end = len(src) - 1
+         if src.find('\\', p, end) < 0:
+             return src[1:-1]
+
+         sb = io.StringIO()
+         while True:
+             if (n := src.find('\\', p, end)) < 0:
+                 sb.write(src[p:end])
+                 break
+
+             sb.write(src[p:n])
+             p = n + 1
+             check.state(p < end)
+             x = src[p]
+             p += 1
+
+             if x == 'u':
+                 check.state(p < end - 4)
+                 r = chr(int(src[p:p + 4], 16))
+                 p += 4

-             if char == ')':
-                 self._match()
-                 self._update_stack()
-                 stack_item, prev_state = self._stack.pop()
-                 self._result = self._list_maker(prev_state)
-                 self._update_stack()
-                 continue
+             else:
+                 r = STRING_ESCAPE_MAP[x]

-             if char == '[':
-                 self._match()
-                 self._update_stack()
-                 self._stack.append((ListParser._StackItem.VECTOR, []))
-                 continue
+             sb.write(r)

-             if char == '(':
-                 self._match()
-                 self._update_stack()
-                 self._stack.append((ListParser._StackItem.LIST, []))
-                 continue
+         return sb.getvalue()

-             state_plus_char = self._state + char
-             if state_plus_char == '#_':
-                 self._stack.append((ListParser._StackItem.TAG, char))
-                 self._result = self._UNDEFINED
-                 self._state = ''
-                 continue
+     def _parse_char(self, tok: Token) -> ta.Any:
+         check.state(tok.kind == 'CHAR')
+         src = tok.src
+         check.state(len(src) > 1)
+         check.state(src.startswith('\\'))

-             if state_plus_char.endswith('#{'):
-                 self._state = self._state[:-1]  # Remove the '#'
-                 self._match()
-                 self._update_stack()
-                 self._stack.append((ListParser._StackItem.SET, []))
-                 self._state = ''
-                 continue
+         if len(src) == 2:
+             c = src[1]

-             if char == '{':
-                 self._match()
-                 self._update_stack()
-                 self._stack.append((ListParser._StackItem.MAP, [[], []]))
-                 self._state = ''
-                 continue
+         elif src[1] == 'u':
+             check.state(len(src) == 6)
+             c = chr(int(src[2:], 16))

-             self._state += char
-             continue
+         elif src[1] == 'o':
+             # \oXXX -> octal
+             raise NotImplementedError

-         elif self._mode == ListParser._ParseMode.STRING:  # noqa
-             if char == '\\':
-                 self._stack.append((self._mode, self._state))
-                 self._mode = ListParser._ParseMode.ESCAPE
-                 self._state = ''
-                 continue
+         else:
+             c = CHAR_ESCAPE_MAP[src[1:]]

-             if char == '"':
-                 self._mode = ListParser._ParseMode.IDLE
-                 self._result = self._state
-                 self._update_stack()
-                 self._state = ''
-                 continue
+         return self._char_maker(c)

-             self._state += char
+     _INT_PAT = re.compile(r'[-+]?(0|[1-9][0-9]*)N?')
+     _FLOAT_PAT = re.compile(r'[-+]?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][+-]?(0|[1-9][0-9]*))?M?')

-         elif self._mode == ListParser._ParseMode.ESCAPE:
-             # TODO what should happen when escaping other char
-             escaped_char = self._STRING_ESCAPE_MAP.get(char, char)
-             stack_item, prev_state = self._stack.pop()
-             self._mode = check.isinstance(stack_item, ListParser._ParseMode)
-             self._state = prev_state + escaped_char
+     def _parse_word(self, tok: Token) -> ta.Any:
+         check.state(tok.kind == 'WORD')
+         src = tok.src
+         check.non_empty_str(src)
+         check.state(not src.startswith('#'))

-         elif self._mode == ListParser._ParseMode.COMMENT:
-             if char == '\n':
-                 self._mode = ListParser._ParseMode.IDLE
+         if src in WORD_CONST_VALUES:
+             return WORD_CONST_VALUES[src]

-         else:
-             raise RuntimeError(self._mode)
+         elif src.startswith(':'):
+             return self._keyword_maker(src[1:])

-         if i >= 0:
-             self._match()
-             self._update_stack()
+         elif self._INT_PAT.fullmatch(src):
+             # FIXME: numbers lol
+             # 2r101010, 052, 8r52, 0x2a, 36r16, and 42 are all the same Long.
+             # Floating point numbers are read as Doubles; with M suffix they are read as BigDecimals.
+             # Ratios are supported, e.g. 22/7.
+             if src.endswith('N'):
+                 return int(src[:-1])

-         check.state(not self._stack)
+             else:
+                 return int(src)

-         if self._result is not self._UNDEFINED:
-             values.append(self._result)
-         return values
+         elif self._FLOAT_PAT.fullmatch(src):
+             return float(src)

+         else:
+             return self._symbol_maker(src)

- #

+ ##

- def parse_list(src: str, **kwargs: ta.Any) -> list[ta.Any]:
-     """Parse an edn string and return the corresponding Python object."""

-     parser = ListParser(**kwargs)
-     return parser.parse(src)
+ def parse_list(src: str, **kwargs: ta.Any) -> list[ta.Any]:
+     r: list[ta.Any] = []
+     with StreamLexer() as l:
+         with StreamParser(**kwargs) as p:
+             for c in [*src, '']:
+                 for t in l(c):
+                     for o in p(t):
+                         r.append(o)  # noqa
+     return r


  def parse(src: str, **kwargs: ta.Any) -> ta.Any | None:
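
The new `parse_list` also shows the streaming contract: `StreamLexer` and `StreamParser` are context managers, called once per input character and once per token respectively, and each call yields zero or more outputs. A hedged sketch of incremental use built only on that visible pattern (the `omlish.formats.edn` import paths are assumed from the relative imports above):

    # Sketch of driving the new parser incrementally; module paths are assumed.
    from omlish.formats.edn.lexing import StreamLexer
    from omlish.formats.edn.parsing import StreamParser

    def iter_parse(src: str):
        # Feed one character at a time; the trailing '' flushes the lexer,
        # exactly as parse_list does above.
        with StreamLexer() as lexer:
            with StreamParser() as parser:
                for ch in [*src, '']:
                    for tok in lexer(ch):
                        for value in parser(tok):
                            yield value

    # Values come out as soon as they are complete, e.g.:
    #   list(iter_parse('[1 2 3] {:a 1}'))
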
@@ -15,7 +15,7 @@ _DEBUG = __debug__

  @dc.dataclass(frozen=True)
  class Value(lang.Abstract, lang.Sealed):
-     pass
+     meta: ta.Any | None = dc.field(default=None, kw_only=True)


  #
@@ -87,8 +87,8 @@ class List(Collection, lang.Final):
          check.isinstance(self.items, tuple)

      @classmethod
-     def new(cls, items: ta.Iterable[ta.Any]) -> 'List':
-         return cls(tuple(items))
+     def new(cls, items: ta.Iterable[ta.Any], *, meta: ta.Any | None = None) -> 'List':
+         return cls(tuple(items), meta=meta)


  @dataclass_cache_hash()
@@ -104,8 +104,8 @@ class Vector(Collection, lang.Final):
          check.isinstance(self.items, tuple)

      @classmethod
-     def new(cls, items: ta.Iterable[ta.Any]) -> 'Vector':
-         return cls(tuple(items))
+     def new(cls, items: ta.Iterable[ta.Any], *, meta: ta.Any | None = None) -> 'Vector':
+         return cls(tuple(items), meta=meta)


  @dataclass_cache_hash()
@@ -121,8 +121,8 @@ class Set(Collection, lang.Final):
          check.isinstance(self.items, tuple)

      @classmethod
-     def new(cls, items: ta.Iterable[ta.Any]) -> 'Set':
-         return cls(tuple(items))
+     def new(cls, items: ta.Iterable[ta.Any], *, meta: ta.Any | None = None) -> 'Set':
+         return cls(tuple(items), meta=meta)


  @dataclass_cache_hash()
@@ -141,8 +141,8 @@ class Map(Collection, lang.Final):
              check.equal(len(t), 2)

      @classmethod
-     def new(cls, items: ta.Iterable[ta.Iterable[ta.Any]]) -> 'Map':
-         return cls(tuple((k, v) for k, v in items))
+     def new(cls, items: ta.Iterable[ta.Iterable[ta.Any]], *, meta: ta.Any | None = None) -> 'Map':
+         return cls(tuple((k, v) for k, v in items), meta=meta)


  #
@@ -150,7 +150,7 @@ class Map(Collection, lang.Final):

  @dataclass_cache_hash()
  @dc.dataclass(frozen=True)
- class TaggedVal(Value, lang.Final):
+ class Tagged(Value, lang.Final):
      t: str
      v: ta.Any

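
The values.py hunks give every `Value` an optional keyword-only `meta` field, thread it through the collection `new()` constructors, and rename `TaggedVal` to `Tagged`; this is what the parser's `MetaMaker` defaults and `_emit_value` rely on. A small sketch of the resulting surface, with the `omlish.formats.edn.values` import path assumed:

    # Sketch of the post-change value API; import path assumed.
    from omlish.formats.edn.values import Tagged
    from omlish.formats.edn.values import Vector

    v = Vector.new([1, 2, 3], meta={'line': 1})  # meta is kw-only and defaults to None
    assert v.meta == {'line': 1}

    t = Tagged('inst', '2024-01-01T00:00:00')  # replaces the former TaggedVal
    assert (t.t, t.v) == ('inst', '2024-01-01T00:00:00')
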