omlish 0.0.0.dev305__py3-none-any.whl → 0.0.0.dev307__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
omlish/.manifests.json CHANGED
@@ -51,6 +51,20 @@
      }
    }
  },
+ {
+   "module": ".formats.edn.codec",
+   "attr": "_EDN_LAZY_CODEC",
+   "file": "omlish/formats/edn/codec.py",
+   "line": 25,
+   "value": {
+     "$.codecs.base.LazyLoadedCodec": {
+       "mod_name": "omlish.formats.edn.codec",
+       "attr_name": "EDN_CODEC",
+       "name": "edn",
+       "aliases": null
+     }
+   }
+ },
  {
    "module": ".formats.ini.codec",
    "attr": "_INI_LAZY_CODEC",
omlish/__about__.py CHANGED
@@ -1,5 +1,5 @@
- __version__ = '0.0.0.dev305'
- __revision__ = '13f1bfc223e6af63310421aa4ac6133b3f1e0110'
+ __version__ = '0.0.0.dev307'
+ __revision__ = '7fd729bd16b72af663b113712ea301f150b2449d'
 
 
  #
@@ -101,7 +101,7 @@ class Project(ProjectBase):
 
          'apsw ~= 3.49',
 
-         'sqlean.py ~= 3.47',
+         'sqlean.py ~= 3.49',
 
          'duckdb ~= 1.2',
      ],
omlish/formats/edn/__init__.py ADDED
@@ -0,0 +1,16 @@
+ from .parsing import (  # noqa
+     MetaMaker,
+     parse,
+     parse_list,
+ )
+
+ from .values import (  # noqa
+     Char,
+     Keyword,
+     List,
+     Map,
+     Set,
+     Symbol,
+     Tagged,
+     Vector,
+ )
omlish/formats/edn/codec.py ADDED
@@ -0,0 +1,26 @@
+ import typing as ta
+
+ from ..codecs import make_object_lazy_loaded_codec
+ from ..codecs import make_str_object_codec
+ from .parsing import parse
+
+
+ ##
+
+
+ def dumps(obj: ta.Any) -> str:
+     # return json.dumps(obj)
+     raise NotImplementedError
+
+
+ def loads(s: str) -> ta.Any:
+     return parse(s)
+
+
+ ##
+
+
+ EDN_CODEC = make_str_object_codec('edn', dumps, loads)
+
+ # @omlish-manifest
+ _EDN_LAZY_CODEC = make_object_lazy_loaded_codec(__name__, 'EDN_CODEC', EDN_CODEC)
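
The manifest entry added earlier in this diff and this module wire 'edn' into the package's lazy codec registry; decoding delegates to the new streaming parser, while dumps is still a stub. A minimal usage sketch (the registry lookup API itself is not shown in this diff, so only the module-level functions are used):

from omlish.formats.edn import codec

print(codec.loads('{:a 1}'))  # decodes via the new streaming parser

try:
    codec.dumps({'a': 1})  # encoding is not implemented yet
except NotImplementedError:
    pass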
omlish/formats/edn/lexing.py ADDED
@@ -0,0 +1,325 @@
+ r"""
+ https://github.com/edn-format/edn
+ https://github.com/antlr/grammars-v4/blob/master/edn/edn.g4
+ https://github.com/jorinvo/edn-data/blob/1e5824f63803eb58f35e98839352000053d47115/src/parse.ts
+ https://clojure.org/reference/reader#_extensible_data_notation_edn
+ """
+ import dataclasses as dc
+ import io
+ import typing as ta
+
+ from ... import check
+ from ...funcs.genmachine import GenMachine
+
+
+ ##
+
+
+ TokenKind: ta.TypeAlias = ta.Literal[
+     'STRING',
+     'CHAR',
+     'WORD',
+     'COMMENT',
+
+     'LPAREN',
+     'RPAREN',
+     'LBRACKET',
+     'RBRACKET',
+     'HASH_LBRACE',
+     'LBRACE',
+     'RBRACE',
+
+     'HASH_UNDERSCORE',
+     'META',
+     'QUOTE',
+ ]
+
+
+ class Position(ta.NamedTuple):
+     ofs: int
+     line: int
+     col: int
+
+
+ class Token(ta.NamedTuple):
+     kind: TokenKind
+     src: str
+
+     pos: Position
+
+     def __iter__(self):
+         raise TypeError
+
+
+ ##
+
+
+ SINGLE_TOKENS: ta.Mapping[str, TokenKind] = {
+     '(': 'LPAREN',
+     ')': 'RPAREN',
+     '[': 'LBRACKET',
+     ']': 'RBRACKET',
+     '{': 'LBRACE',
+     '}': 'RBRACE',
+
+     '^': 'META',
+     "'": 'QUOTE',
+ }
+
+
+ HASH_TOKENS: ta.Mapping[str, TokenKind] = {
+     '{': 'HASH_LBRACE',
+     '_': 'HASH_UNDERSCORE',
+ }
+
+
+ WORD_FIRST_SPECIAL_CHARS = ':.*+!-_?$%&=<>.'
+ WORD_BODY_SPECIAL_CHARS = '/'
+
+
+ ##
+
+
+ @dc.dataclass()
+ class StreamLexError(Exception):
+     message: str
+
+     pos: Position
+
+
+ class StreamLexer(GenMachine[str, Token]):
+     def __init__(self) -> None:
+         self._ofs = 0
+         self._line = 1
+         self._col = 0
+
+         self._buf = io.StringIO()
+
+         super().__init__(self._do_main())
+
+     @property
+     def pos(self) -> Position:
+         return Position(
+             self._ofs,
+             self._line,
+             self._col,
+         )
+
+     def _char_in(self, c: str) -> str:
+         if not isinstance(c, str):
+             raise TypeError(c)
+         if c and len(c) != 1:
+             raise ValueError(c)
+
+         self._ofs += 1
+
+         if c == '\n':
+             self._line += 1
+             self._col = 0
+         else:
+             self._col += 1
+
+         return c
+
+     def _make_tok(
+             self,
+             kind: TokenKind,
+             src: str,
+             pos: Position,
+     ) -> ta.Sequence[Token]:
+         tok = Token(
+             kind,
+             src,
+             pos,
+         )
+         return (tok,)
+
+     def _flip_buf(self) -> str:
+         src = self._buf.getvalue()
+         self._buf.seek(0)
+         self._buf.truncate()
+         return src
+
+     def _raise(self, msg: str, src: Exception | None = None) -> ta.NoReturn:
+         raise StreamLexError(msg, self.pos) from src
+
+     def _do_main(self, p: str | None = None):
+         while True:
+             if p is not None:
+                 c = p
+                 p = None
+             else:
+                 c = self._char_in((yield None))  # noqa
+
+             if not c:
+                 return None
+
+             if c.isspace() or c == ',':
+                 continue
+
+             if c in SINGLE_TOKENS:
+                 yield self._make_tok(SINGLE_TOKENS[c], c, self.pos)
+                 continue
+
+             if c == ';':
+                 return self._do_comment()
+
+             if c == '"':
+                 return self._do_string()
+
+             if c == '\\':
+                 return self._do_char()
+
+             if c == '#':
+                 return self._do_hash()
+
+             if (
+                     c.isalnum() or
+                     c in WORD_FIRST_SPECIAL_CHARS
+             ):
+                 return self._do_word(c)
+
+             self._raise(f'Unexpected input: {c}')
+
+     def _do_comment(self):
+         check.state(self._buf.tell() == 0)
+         self._buf.write(';')
+
+         pos = self.pos
+
+         while True:
+             try:
+                 c = self._char_in((yield None))  # noqa
+             except GeneratorExit:
+                 self._raise('Unexpected end of input')
+
+             if not c or c == '\n':
+                 break
+
+             self._buf.write(c)
+
+         src = self._flip_buf()
+         yield self._make_tok('COMMENT', src, pos)
+         return self._do_main()
+
+     def _do_string(self):
+         check.state(self._buf.tell() == 0)
+         self._buf.write('"')
+
+         pos = self.pos
+
+         esc = False
+         while True:
+             try:
+                 c = self._char_in((yield None))  # noqa
+             except GeneratorExit:
+                 self._raise('Unexpected end of input')
+
+             if not c:
+                 self._raise(f'Unterminated string literal: {self._buf.getvalue()}')
+
+             self._buf.write(c)
+             if esc:
+                 esc = False
+             elif c == '\\':
+                 esc = True
+             elif c == '"':
+                 break
+
+         src = self._flip_buf()
+         yield self._make_tok('STRING', src, pos)
+         return self._do_main()
+
+     def _do_char(self):
+         check.state(self._buf.tell() == 0)
+         self._buf.write('\\')
+
+         pos = self.pos
+
+         while True:
+             try:
+                 c = self._char_in((yield None))  # noqa
+             except GeneratorExit:
+                 self._raise('Unexpected end of input')
+
+             if not c or not (
+                     c.isalnum() or
+                     c == '\\'
+             ):
+                 break
+
+             self._buf.write(c)
+
+         src = self._flip_buf()
+         yield self._make_tok('CHAR', src, pos)
+         return self._do_main(c)
+
+     def _do_hash(self):
+         check.state(self._buf.tell() == 0)
+
+         pos = self.pos
+
+         try:
+             c = self._char_in((yield None))  # noqa
+         except GeneratorExit:
+             self._raise('Unexpected end of input')
+
+         if (ht := HASH_TOKENS.get(c)) is not None:
+             yield self._make_tok(ht, '#' + c, pos)
+             return self._do_main()
+
+         elif (
+                 c.isalnum() or
+                 c == '#' or
+                 c in WORD_FIRST_SPECIAL_CHARS
+         ):
+             return self._do_word('#' + c, pos=pos)
+
+         else:
+             self._raise(f'Unexpected input: {c}')
+
+     def _do_word(self, pfx: str, *, pos: Position | None = None):
+         check.state(self._buf.tell() == 0)
+         self._buf.write(pfx)
+
+         if pos is None:
+             pos = self.pos
+
+         while True:
+             try:
+                 c = self._char_in((yield None))  # noqa
+             except GeneratorExit:
+                 self._raise('Unexpected end of input')
+
+             if not c or not (
+                     c.isalnum() or
+                     c in WORD_FIRST_SPECIAL_CHARS or
+                     c in WORD_BODY_SPECIAL_CHARS
+             ):
+                 break
+
+             self._buf.write(c)
+
+         src = self._flip_buf()
+         yield self._make_tok('WORD', src, pos)
+         return self._do_main(c)
+
+
+ ##
+
+
+ def test_lex():
+     for s in [
+         '"abc"',
+         '{"a" "b"}',
+         '1',
+         '-1',
+         '{a :b c 420}',
+         '#{a}',
+     ]:
+         print(s)
+         with StreamLexer() as lex:
+             for c in [*s, '']:
+                 for t in lex(c):
+                     print(t)
+         print()
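
As test_lex sketches, the lexer is push-driven: callers feed it one character at a time, an empty string signals end-of-input, and each feed yields zero or more tokens. A small driver using only what the module above defines:

from omlish.formats.edn.lexing import StreamLexer

with StreamLexer() as lex:
    for c in [*'{:a 1}', '']:
        for tok in lex(c):
            print(tok.kind, tok.src, tok.pos)
# Expected token kinds here: LBRACE, WORD (':a'), WORD ('1'), RBRACE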
omlish/formats/edn/parsing.py ADDED
@@ -0,0 +1,400 @@
+ # ruff: noqa: PYI055 UP007
+ """
+ TODO:
+  - \u0123 in strings
+  - https://clojure.org/reference/reader
+  - reader meta - ^:foo
+  - read table
+ """
+ import dataclasses as dc
+ import datetime
+ import enum
+ import io
+ import re
+ import typing as ta
+
+ from ... import check
+ from ...funcs.genmachine import GenMachine
+ from .lexing import Position
+ from .lexing import StreamLexer
+ from .lexing import Token
+ from .values import Char
+ from .values import Collection
+ from .values import Keyword
+ from .values import List
+ from .values import Map
+ from .values import Set
+ from .values import Symbol
+ from .values import Tagged
+ from .values import Vector
+
+
+ ##
+
+
+ WORD_CONST_VALUES: ta.Mapping[str, ta.Any] = {
+     'true': True,
+     'false': False,
+     'nil': None,
+
+     '##Inf': float('inf'),
+     '##-Inf': float('-inf'),
+     '##NaN': float('nan'),
+ }
+
+
+ STRING_ESCAPE_MAP: ta.Mapping[str, str] = {
+     '"': '"',
+     '\\': '\\',
+     'b': '\b',
+     'f': '\f',
+     'n': '\n',
+     'r': '\r',
+     't': '\t',
+ }
+
+
+ CHAR_ESCAPE_MAP: ta.Mapping[str, str] = {
+     'backspace': '\b',
+     'formfeed': '\f',
+     'newline': '\n',
+     'return': '\r',
+     'space': ' ',
+     'tab': '\t',
+ }
+
+
+ ##
+
+
+ @dc.dataclass()
+ class StreamParseError(Exception):
+     message: str
+
+     pos: Position | None = None
+
+
+ @dc.dataclass(frozen=True)
+ class MetaMaker:
+     fn: ta.Callable[..., ta.Any]
+
+     def __call__(self, *args: ta.Any, meta: ta.Any | None = None) -> ta.Any:
+         return self.fn(*args, meta=meta)
+
+
+ class StreamParser(GenMachine[Token, ta.Any]):
+     DEFAULT_TAG_HANDLERS: ta.ClassVar[ta.Mapping[str, ta.Callable[..., ta.Any]]] = {
+         'inst': lambda val: datetime.datetime.fromisoformat(val) if isinstance(val, str) else None,
+     }
+
+     def __init__(
+             self,
+             *,
+             keyword_maker: ta.Callable[..., ta.Any] = MetaMaker(Keyword),
+             char_maker: ta.Callable[..., ta.Any] = MetaMaker(Char),
+             symbol_maker: ta.Callable[..., ta.Any] = MetaMaker(Symbol),
+
+             list_maker: ta.Callable[..., ta.Any] = MetaMaker(List.new),
+             vector_maker: ta.Callable[..., ta.Any] = MetaMaker(Vector.new),
+             set_maker: ta.Callable[..., ta.Any] = MetaMaker(Set.new),
+             map_maker: ta.Callable[..., ta.Any] = MetaMaker(Map.new),
+
+             tag_handlers: ta.Mapping[str, ta.Callable[..., ta.Any]] | None = None,
+     ) -> None:
+         self._keyword_maker = keyword_maker
+         self._char_maker = char_maker
+         self._symbol_maker = symbol_maker
+
+         self._list_maker = list_maker
+         self._vector_maker = vector_maker
+         self._set_maker = set_maker
+         self._map_maker = map_maker
+
+         self._tag_handlers = {
+             **self.DEFAULT_TAG_HANDLERS,
+             **(tag_handlers or {}),
+         }
+
+         self._stack: list[
+             tuple[
+                 ta.Union[
+                     type[Collection],
+                     StreamParser._StackSpecial,
+                 ],
+                 list[ta.Any],
+             ],
+         ] = []
+
+         super().__init__(self._do_main())
+
+     class _StackSpecial(enum.Enum):  # noqa
+         DISCARD = enum.auto()
+         TAG = enum.auto()
+
+     def _emit_value(self, value: ta.Any) -> tuple[ta.Any, ...]:
+         while self._stack and self._stack[-1][0] is StreamParser._StackSpecial.TAG:
+             cc, cl = self._stack.pop()
+             ts = check.non_empty_str(check.single(cl))
+             value = Tagged(ts, value)
+
+         if not self._stack:
+             return (value,)
+
+         cc, cl = self._stack[-1]
+
+         if cc is StreamParser._StackSpecial.DISCARD:
+             check.empty(cl)
+             self._stack.pop()
+             return ()
+
+         elif cc is StreamParser._StackSpecial.TAG:
+             ts = check.non_empty_str(check.single(cl))
+             self._stack.pop()
+             tv = Tagged(ts, value)
+             return (tv,)
+
+         elif cc is Map:
+             if cl and len(cl[-1]) < 2:
+                 cl[-1] = (*cl[-1], value)
+             else:
+                 cl.append((value,))
+             return ()
+
+         elif isinstance(cc, type) and issubclass(cc, Collection):
+             cl.append(value)
+             return ()
+
+         else:
+             raise RuntimeError(cc)
+
+     def _do_main(self):
+         while True:
+             tok: Token
+             try:
+                 tok = yield None  # noqa
+             except GeneratorExit:
+                 if self._stack:
+                     raise StreamParseError('Expected value') from None
+                 else:
+                     raise
+
+             value: ta.Any
+
+             # scalars
+
+             if tok.kind == 'STRING':
+                 value = self._parse_string(tok)
+
+             elif tok.kind == 'CHAR':
+                 value = self._parse_char(tok)
+
+             elif tok.kind == 'WORD':
+                 if tok.src.startswith('#'):
+                     # FIXME: more dispatching
+                     self._stack.append((StreamParser._StackSpecial.TAG, [tok.src[1:]]))
+                     continue
+
+                 else:
+                     value = self._parse_word(tok)
+
+             elif tok.kind == 'COMMENT':
+                 continue
+
+             # open
+
+             elif tok.kind == 'LPAREN':
+                 self._stack.append((List, []))
+                 continue
+
+             elif tok.kind == 'LBRACKET':
+                 self._stack.append((Vector, []))
+                 continue
+
+             elif tok.kind == 'HASH_LBRACE':
+                 self._stack.append((Set, []))
+                 continue
+
+             elif tok.kind == 'LBRACE':
+                 self._stack.append((Map, []))
+                 continue
+
+             elif tok.kind == 'HASH_UNDERSCORE':
+                 self._stack.append((StreamParser._StackSpecial.DISCARD, []))
+                 continue
+
+             # close
+
+             elif tok.kind == 'RPAREN':
+                 cc, cl = self._stack.pop()
+                 check.state(cc is List)
+                 value = self._list_maker(cl)
+
+             elif tok.kind == 'RBRACKET':
+                 cc, cl = self._stack.pop()
+                 check.state(cc is Vector)
+                 value = self._vector_maker(cl)
+
+             elif tok.kind == 'RBRACE':
+                 cc, cl = self._stack.pop()
+
+                 if cc is Set:
+                     value = self._set_maker(cl)
+
+                 elif cc is Map:
+                     if cl and len(cl[-1]) != 2:
+                         raise RuntimeError('Mismatched map entries')
+                     value = self._map_maker(cl)
+
+                 else:
+                     raise RuntimeError(cc)
+
+             # nyi
+
+             elif tok.kind == 'META':
+                 raise NotImplementedError
+
+             elif tok.kind == 'QUOTE':
+                 raise NotImplementedError
+
+             # failure
+
+             else:
+                 raise ValueError(tok.kind)
+
+             # emit
+
+             if (ev := self._emit_value(value)):
+                 yield ev
+
+     def _parse_string(self, tok: Token) -> str:
+         check.state(tok.kind == 'STRING')
+         src = tok.src
+         check.state(src[0] == '"')
+         check.state(src[-1] == '"')
+         check.state(len(src) > 1)
+
+         p = 1
+         end = len(src) - 1
+         if src.find('\\', p, end) < 0:
+             return src[1:-1]
+
+         sb = io.StringIO()
+         while True:
+             if (n := src.find('\\', p, end)) < 0:
+                 sb.write(src[p:end])
+                 break
+
+             sb.write(src[p:n])
+             p = n + 1
+             check.state(p < end)
+             x = src[p]
+             p += 1
+
+             if x == 'u':
+                 check.state(p + 4 <= end)
+                 r = chr(int(src[p:p + 4], 16))
+                 p += 4
+
+             else:
+                 r = STRING_ESCAPE_MAP[x]
+
+             sb.write(r)
+
+         return sb.getvalue()
+
+     def _parse_char(self, tok: Token) -> ta.Any:
+         check.state(tok.kind == 'CHAR')
+         src = tok.src
+         check.state(len(src) > 1)
+         check.state(src.startswith('\\'))
+
+         if len(src) == 2:
+             c = src[1]
+
+         elif src[1] == 'u':
+             check.state(len(src) == 6)
+             c = chr(int(src[2:], 16))
+
+         elif src[1] == 'o':
+             # \oXXX -> octal
+             raise NotImplementedError
+
+         else:
+             c = CHAR_ESCAPE_MAP[src[1:]]
+
+         return self._char_maker(c)
+
+     _INT_PAT = re.compile(r'[-+]?(0|[1-9][0-9]*)')
+     _BIGINT_PAT = re.compile(r'[-+]?(0|[1-9][0-9]*)N')
+     _FLOAT_PAT = re.compile(r'[-+]?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][+-]?(0|[1-9][0-9]*))?M?')
+
+     def _parse_word(self, tok: Token) -> ta.Any:
+         check.state(tok.kind == 'WORD')
+         src = tok.src
+         check.non_empty_str(src)
+         check.state(not src.startswith('#'))
+
+         if src in WORD_CONST_VALUES:
+             return WORD_CONST_VALUES[src]
+
+         elif src.startswith(':'):
+             return self._keyword_maker(src[1:])
+
+         elif self._INT_PAT.fullmatch(src):
+             # FIXME: numbers lol
+             # 2r101010, 052, 8r52, 0x2a, 36r16, and 42 are all the same Long.
+             # Floating point numbers are read as Doubles; with M suffix they are read as BigDecimals.
+             # Ratios are supported, e.g. 22/7.
+             return int(src)
+
+         elif self._BIGINT_PAT.fullmatch(src):
+             return int(src[:-1])
+
+         elif self._FLOAT_PAT.fullmatch(src):
+             return float(src)
+
+         else:
+             return self._symbol_maker(src)
+
+
+ ##
+
+
+ def parse_list(src: str, **kwargs: ta.Any) -> list[ta.Any]:
+     r: list[ta.Any] = []
+     with StreamLexer() as l:
+         with StreamParser(**kwargs) as p:
+             for c in [*src, '']:
+                 for t in l(c):
+                     for o in p(t):
+                         r.append(o)  # noqa
+     return r
+
+
+ def parse(src: str, **kwargs: ta.Any) -> ta.Any | None:
+     values = parse_list(src, **kwargs)
+     if not values:
+         return None
+     return check.single(values)
+
+
+ ##
+
+
+ def test_parse():
+     for s in [
+         '"abc"',
+         '"a\\bc"',
+         '{"a" "b"}',
+         '1',
+         '-1',
+         '{a :b c 420}',
+         '#{a}',
+         '(1 #_ 2 3)',
+         '"foo\u1234bar"',
+         '\\x',
+         '\\u1234',
+     ]:
+         print(s)
+         print(parse(s))
+         print()
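
parse and parse_list glue the lexer and parser together over a whole string. A few illustrative calls based on the code above; note that in this version tagged literals surface as Tagged values, since the stored tag_handlers mapping is not yet consulted during emission:

from omlish.formats.edn.parsing import parse, parse_list

print(parse_list('1 2 3'))    # [1, 2, 3]
print(parse('(1 #_ 2 3)'))    # List((1, 3)); #_ discards the following value
print(parse('{:a 1}'))        # a Map built from (Keyword('a'), 1) pairs
print(parse('#inst "2024"'))  # Tagged('inst', '2024'), not a datetime yet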
omlish/formats/edn/values.py ADDED
@@ -0,0 +1,162 @@
+ import dataclasses as dc
+ import typing as ta
+
+ from ... import check
+ from ... import lang
+ from ...lite.dataclasses import dataclass_cache_hash
+
+
+ ##
+
+
+ _DEBUG = __debug__
+ # _DEBUG = True
+
+
+ @dc.dataclass(frozen=True)
+ class Value(lang.Abstract, lang.Sealed):
+     meta: ta.Any | None = dc.field(default=None, kw_only=True)
+
+
+ #
+
+
+ @dc.dataclass(frozen=True)
+ class Scalar(Value, lang.Abstract):
+     pass
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Keyword(Scalar, lang.Final):
+     s: str
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.s!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.s, str)
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Char(Scalar, lang.Final):
+     c: str
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.c!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.c, str)
+             check.equal(len(self.c), 1)
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Symbol(Scalar, lang.Final):
+     n: str
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.n!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.non_empty_str(self.n)
+
+
+ #
+
+
+ @dc.dataclass(frozen=True)
+ class Collection(Value, lang.Abstract):
+     pass
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class List(Collection, lang.Final):
+     items: ta.Sequence[ta.Any]
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.items!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.items, tuple)
+
+     @classmethod
+     def new(cls, items: ta.Iterable[ta.Any], *, meta: ta.Any | None = None) -> 'List':
+         return cls(tuple(items), meta=meta)
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Vector(Collection, lang.Final):
+     items: ta.Sequence[ta.Any]
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.items!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.items, tuple)
+
+     @classmethod
+     def new(cls, items: ta.Iterable[ta.Any], *, meta: ta.Any | None = None) -> 'Vector':
+         return cls(tuple(items), meta=meta)
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Set(Collection, lang.Final):
+     items: ta.Sequence[ta.Any]
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.items!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.items, tuple)
+
+     @classmethod
+     def new(cls, items: ta.Iterable[ta.Any], *, meta: ta.Any | None = None) -> 'Set':
+         return cls(tuple(items), meta=meta)
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Map(Collection, lang.Final):
+     items: ta.Sequence[tuple[ta.Any, ta.Any]]
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.items!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.isinstance(self.items, tuple)
+             for t in self.items:
+                 check.isinstance(t, tuple)
+                 check.equal(len(t), 2)
+
+     @classmethod
+     def new(cls, items: ta.Iterable[ta.Iterable[ta.Any]], *, meta: ta.Any | None = None) -> 'Map':
+         return cls(tuple((k, v) for k, v in items), meta=meta)
+
+
+ #
+
+
+ @dataclass_cache_hash()
+ @dc.dataclass(frozen=True)
+ class Tagged(Value, lang.Final):
+     t: str
+     v: ta.Any
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({self.t!r}, {self.v!r})'
+
+     if _DEBUG:
+         def __post_init__(self) -> None:
+             check.non_empty_str(self.t)
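
The value model is a set of frozen dataclasses with cached hashes, so parsed EDN data can serve as dict keys or set members; the _DEBUG-gated __post_init__ checks only run when assertions are enabled. A small sketch:

from omlish.formats.edn.values import Keyword, Map, Vector

k = Keyword('a')
v = Vector.new([1, 2])
m = Map.new([(k, v)])

d = {m: 'parsed'}  # hashable, via dataclass_cache_hash
print(k, v, m)     # Keyword('a') Vector((1, 2)) Map(((Keyword('a'), Vector((1, 2))),))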
omlish/formats/json/__init__.py CHANGED
@@ -35,11 +35,11 @@ from .json import (  # noqa
  )
 
  if _ta.TYPE_CHECKING:
-     from .render import (  # noqa
+     from .rendering import (  # noqa
          JsonRenderer,
      )
  else:
-     _lang.proxy_init(globals(), '.render', [
+     _lang.proxy_init(globals(), '.rendering', [
          'JsonRenderer',
      ])
 
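
The render-to-rendering rename only has to touch this one site because the module is otherwise reached lazily: under TYPE_CHECKING the import is static, while at runtime _lang.proxy_init defers loading until the attribute is touched. A sketch of the intended effect, assuming proxy_init installs a lazy module attribute as its use here suggests:

import omlish.formats.json as oj

# No '.rendering' import has happened yet; touching the attribute triggers it:
print(oj.JsonRenderer)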
omlish/formats/json/stream/build.py → omlish/formats/json/stream/building.py RENAMED
@@ -1,13 +1,13 @@
  import typing as ta
 
- from .lex import SCALAR_VALUE_TYPES
- from .parse import BeginArray
- from .parse import BeginObject
- from .parse import EndArray
- from .parse import EndObject
- from .parse import JsonStreamObject
- from .parse import JsonStreamParserEvent
- from .parse import Key
+ from .lexing import SCALAR_VALUE_TYPES
+ from .parsing import BeginArray
+ from .parsing import BeginObject
+ from .parsing import EndArray
+ from .parsing import EndObject
+ from .parsing import JsonStreamObject
+ from .parsing import JsonStreamParserEvent
+ from .parsing import Key
 
 
  ##
omlish/formats/json/stream/parse.py → omlish/formats/json/stream/parsing.py RENAMED
@@ -4,11 +4,11 @@ import typing as ta
  from .... import lang
  from ....funcs.genmachine import GenMachine
  from .errors import JsonStreamError
- from .lex import SCALAR_VALUE_TYPES
- from .lex import VALUE_TOKEN_KINDS
- from .lex import Position
- from .lex import ScalarValue
- from .lex import Token
+ from .lexing import SCALAR_VALUE_TYPES
+ from .lexing import VALUE_TOKEN_KINDS
+ from .lexing import Position
+ from .lexing import ScalarValue
+ from .lexing import Token
 
 
  ##
omlish/formats/json/stream/render.py → omlish/formats/json/stream/rendering.py RENAMED
@@ -1,14 +1,14 @@
  import io
  import typing as ta
 
- from ..render import AbstractJsonRenderer
+ from ..rendering import AbstractJsonRenderer
  from ..types import SCALAR_TYPES
- from .parse import BeginArray
- from .parse import BeginObject
- from .parse import EndArray
- from .parse import EndObject
- from .parse import JsonStreamParserEvent
- from .parse import Key
+ from .parsing import BeginArray
+ from .parsing import BeginObject
+ from .parsing import EndArray
+ from .parsing import EndObject
+ from .parsing import JsonStreamParserEvent
+ from .parsing import Key
 
 
  ##
omlish/formats/json/stream/utils.py CHANGED
@@ -2,9 +2,9 @@ import dataclasses as dc
  import typing as ta
 
  from .... import lang
- from .build import JsonObjectBuilder
- from .lex import JsonStreamLexer
- from .parse import JsonStreamParser
+ from .building import JsonObjectBuilder
+ from .lexing import JsonStreamLexer
+ from .parsing import JsonStreamParser
 
 
  ##
omlish/funcs/genmachine.py CHANGED
@@ -34,7 +34,10 @@ class GenMachine(ta.Generic[I, O]):
          if initial is None:
              raise TypeError('No initial state')
 
-         self._advance(initial)
+         self._gen = initial
+
+         if (n := next(self._gen)) is not None:  # noqa
+             raise GenMachine.NotStartedError
 
      def _initial_state(self) -> MachineGen | None:
          return None
@@ -74,33 +77,34 @@ class GenMachine(ta.Generic[I, O]):
      class Error(Exception):
          pass
 
-     class ClosedError(Exception):
+     class NotStartedError(Error):
          pass
 
-     class StateError(Exception):
+     class ClosedError(Error):
          pass
 
-     #
-
-     def _advance(self, gen: MachineGen) -> None:
-         self._gen = gen
+     class StateError(Error):
+         pass
 
-         if (n := next(self._gen)) is not None:  # noqa
-             raise GenMachine.ClosedError
+     #
 
      def __call__(self, i: I) -> ta.Iterable[O]:
          if self._gen is None:
              raise GenMachine.ClosedError
 
          gi: I | None = i
-         try:
-             while (o := self._gen.send(gi)) is not None:
-                 gi = None
-                 yield from o
+         while True:
+             try:
+                 while (o := self._gen.send(gi)) is not None:  # type: ignore[union-attr]
+                     gi = None
+                     yield from o
 
-         except StopIteration as s:
-             if s.value is None:
-                 self._gen = None
-                 return None
+                 break
 
-             self._advance(s.value)
+             except StopIteration as s:
+                 if (sv := s.value) is None:
+                     self._gen = None
+                     return None
+
+                 self._gen = sv
+                 gi = None
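
The refactor folds the old _advance helper into __call__ as a trampoline: when a state generator returns a successor generator, __call__ installs it and primes it within the same call, and an initial generator that yields output before receiving any input now raises the new NotStartedError. A minimal machine showing the protocol, using only behavior visible above:

from omlish.funcs.genmachine import GenMachine


class Upper(GenMachine[str, str]):
    def __init__(self) -> None:
        super().__init__(self._main())

    def _main(self):
        while True:
            c = yield None   # suspend until the next input arrives
            if not c:
                return None  # returning None closes the machine
            yield (c.upper(),)  # emit a batch of one output


m = Upper()
print(list(m('a')))  # ['A']
print(list(m('b')))  # ['B']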
omlish-0.0.0.dev305.dist-info/METADATA → omlish-0.0.0.dev307.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: omlish
- Version: 0.0.0.dev305
+ Version: 0.0.0.dev307
  Summary: omlish
  Author: wrmsr
  License: BSD-3-Clause
@@ -40,7 +40,7 @@ Requires-Dist: aiomysql~=0.2; extra == "all"
  Requires-Dist: aiosqlite~=0.21; extra == "all"
  Requires-Dist: asyncpg~=0.30; extra == "all"
  Requires-Dist: apsw~=3.49; extra == "all"
- Requires-Dist: sqlean.py~=3.47; extra == "all"
+ Requires-Dist: sqlean.py~=3.49; extra == "all"
  Requires-Dist: duckdb~=1.2; extra == "all"
  Requires-Dist: markupsafe~=3.0; extra == "all"
  Requires-Dist: jinja2~=3.1; extra == "all"
@@ -88,7 +88,7 @@ Requires-Dist: aiomysql~=0.2; extra == "sqldrivers"
  Requires-Dist: aiosqlite~=0.21; extra == "sqldrivers"
  Requires-Dist: asyncpg~=0.30; extra == "sqldrivers"
  Requires-Dist: apsw~=3.49; extra == "sqldrivers"
- Requires-Dist: sqlean.py~=3.47; extra == "sqldrivers"
+ Requires-Dist: sqlean.py~=3.49; extra == "sqldrivers"
  Requires-Dist: duckdb~=1.2; extra == "sqldrivers"
  Provides-Extra: templates
  Requires-Dist: markupsafe~=3.0; extra == "templates"
omlish-0.0.0.dev305.dist-info/RECORD → omlish-0.0.0.dev307.dist-info/RECORD RENAMED
@@ -1,5 +1,5 @@
- omlish/.manifests.json,sha256=pjGUyLHaoWpPqRP3jz2u1fC1qoRc2lvrEcpU_Ax2tdg,8253
- omlish/__about__.py,sha256=LvSNEgc-zfbkykExJmNhbtQkXdEXRqPSJufulYnCjfw,3478
+ omlish/.manifests.json,sha256=orgsRvtpHu8tdhaCvlP9v3P495OJopYYiHKjK68WtWg,8587
+ omlish/__about__.py,sha256=YdK-L7JBeMEmm3Dl7hWjoFj3BouojWQPvOl_zSLUgMA,3478
  omlish/__init__.py,sha256=SsyiITTuK0v74XpKV8dqNaCmjOlan1JZKrHQv5rWKPA,253
  omlish/c3.py,sha256=rer-TPOFDU6fYq_AWio_AmA-ckZ8JDY5shIzQ_yXfzA,8414
  omlish/cached.py,sha256=MLap_p0rdGoDIMVhXVHm1tsbcWobJF0OanoodV03Ju8,542
@@ -319,16 +319,21 @@ omlish/formats/props.py,sha256=auCv-Jx79KGlWfyG1-Qo0ou-Ex0W_mF3r_lDFdsVkWI,18920
  omlish/formats/repr.py,sha256=kYrNs4o-ji8nOdp6u_L3aMgBMWN1ZAZJSAWgQQfStSQ,414
  omlish/formats/xml.py,sha256=VJfqHR60dhAtjeG8WXFMozFqesTBSGvv264d67eDFXc,3514
  omlish/formats/yaml.py,sha256=jGPQlTE0vSV-p0O7TJRNlf6o1uq4gx8PrHZe1ApJ_o8,7386
+ omlish/formats/edn/__init__.py,sha256=H3q5B-dibXvQV8pmuWizTo6Xk75M7M0M7VPCLt86rpo,195
+ omlish/formats/edn/codec.py,sha256=k6-Ra3P3Rlv6JA69-jPLI4nCe5XVes_QJbcsj5DYzMM,454
+ omlish/formats/edn/lexing.py,sha256=plwbFwHLOmrr5_QhmzvMTmTK55Ot0DxRgKY6ASSl_-Y,6966
+ omlish/formats/edn/parsing.py,sha256=rtoOnDo8TuTo_GAhrNoVHu7Ys7CboALdgOF2PevAJYk,10152
+ omlish/formats/edn/values.py,sha256=jf0g88KJIMALxcuH51SoaMWg1HqTUqc1ugldmyyXWoc,3707
  omlish/formats/ini/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  omlish/formats/ini/codec.py,sha256=omuFg0kiDksv8rRlWd_v32ebzEcKlgmiPgGID3bRi2M,631
  omlish/formats/ini/sections.py,sha256=7wYyZdVTQbMPFpjQEACKJfAEPzUBrogINsrvFgxJoZ0,1015
- omlish/formats/json/__init__.py,sha256=1lEtsA5v3F34nlhnWCn2CtoC3cgDIubSsxGz5J9wkZs,780
+ omlish/formats/json/__init__.py,sha256=HXFv4VDTBhx0k5U4JYWZqxziBwzJoPX51QK3xIboT8U,786
  omlish/formats/json/codecs.py,sha256=E5KErfqsgGZq763ixXLT3qysbk5MIsypT92xG5aSaIs,796
  omlish/formats/json/consts.py,sha256=A0cTAGGLyjo-gcYIQrL4JIaardI0yPMhQoNmh42BaRg,387
  omlish/formats/json/encoding.py,sha256=O4iIWle7W_-RwpOvJNlqOfkbnDyiQHexV5Za4hlrFzw,497
  omlish/formats/json/json.py,sha256=Mdqv2vdMi7gp96eV0BIYH5UdWpjWfsh-tSMZeywG-08,331
  omlish/formats/json/literals.py,sha256=6ptwZyfTXodEtAjDnUhsx6XU3KRZWWYWKYtZ8T7rzsQ,5625
- omlish/formats/json/render.py,sha256=r6J5YKSzcxPg_RiG5idGqJ-AIZozJQ_Awj6W_oSGRjk,4555
+ omlish/formats/json/rendering.py,sha256=r6J5YKSzcxPg_RiG5idGqJ-AIZozJQ_Awj6W_oSGRjk,4555
  omlish/formats/json/types.py,sha256=ueO9-uOU2eVWowJf0LH1fHFLjZ6fTIZyq9qybcLQaiQ,147
  omlish/formats/json/backends/__init__.py,sha256=gnaNDCxy_KmmPUPDnjxO5_WjuWxLGbI9FYWx8ZJuQUU,97
  omlish/formats/json/backends/base.py,sha256=WqtyoM82pyM0NyqpPwndrebr1bUVU1QlpmVQNrcAO8c,1114
@@ -338,12 +343,12 @@ omlish/formats/json/backends/orjson.py,sha256=wR8pMGFtkhZGHcNVk7vNYUnv8lUapdK89p
  omlish/formats/json/backends/std.py,sha256=PM00Kh9ZR2XzollHMEvdo35Eml1N-zFfRW-LOCV5ftM,3085
  omlish/formats/json/backends/ujson.py,sha256=BNJCU4kluGHdqTUKLJEuHhE2m2TmqR7HEN289S0Eokg,2278
  omlish/formats/json/stream/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- omlish/formats/json/stream/build.py,sha256=EveVyYjc3lbVQRs8Y5uMGEKp0E_gJAUR5bd2ToSVnSU,2533
+ omlish/formats/json/stream/building.py,sha256=SGbExmaerqOEiNSom2AERlpyXTj4dpM0QbMW-2WWM2o,2550
  omlish/formats/json/stream/errors.py,sha256=c8M8UAYmIZ-vWZLeKD2jMj4EDCJbr9QR8Jq_DyHjujQ,43
- omlish/formats/json/stream/lex.py,sha256=ItsWvtl5SZH-HwQtPy8Cpf4nszqDzvUTdIOEmSRiZ-E,6807
- omlish/formats/json/stream/parse.py,sha256=JuYmXwtTHmQJTFKoJNoEHUpCPxXdl_gvKPykVXgED34,6208
- omlish/formats/json/stream/render.py,sha256=NtmDsN92xZi5dkgSSuMeMXMAiJblmjz1arB4Ft7vBhc,3715
- omlish/formats/json/stream/utils.py,sha256=QASlxxQGscktsHrORlt8m9V3VWLDakP01QnsSPHLDQ8,1189
+ omlish/formats/json/stream/lexing.py,sha256=ItsWvtl5SZH-HwQtPy8Cpf4nszqDzvUTdIOEmSRiZ-E,6807
+ omlish/formats/json/stream/parsing.py,sha256=yvH5Llql3Ri_1xDhi1s9CKL6XLJVSQ8vYJ_dz3KAX-4,6223
+ omlish/formats/json/stream/rendering.py,sha256=uuJc__MR0G5kypYMAAudBNjBfiIzA_GGli-DWT90428,3730
+ omlish/formats/json/stream/utils.py,sha256=UhBRuWbb25wrdQWl8Ttq7xGRLoa329TvNdecGCZxgzg,1197
  omlish/formats/json5/Json5.g4,sha256=ZUmgJPvj8lSMUD_v3wijp10ZQExYB5mu5Q089dYEJSU,2389
  omlish/formats/json5/__init__.py,sha256=BsjPz5zJDji3GjQ8x8hWvcl1GYPV_ZIHnE3c2Sr8aTU,102
  omlish/formats/json5/codec.py,sha256=ldnxCRo0JP1fkGLt0mMxJlLvNxqIF_1KUCcSp1HtI-M,452
@@ -360,7 +365,7 @@ omlish/formats/toml/codec.py,sha256=5HFGWEPd9IFxPlRMRheX8FEDlRIzLe1moHEOj2_PFKU,
  omlish/formats/toml/parser.py,sha256=c6Hrf6OfVQVtgsYUXL5P5PQQqF-v7r8nkUmprxhV-lI,30536
  omlish/formats/toml/writer.py,sha256=HIp6XvriXaPTLqyLe-fkIiEf1Pyhsp0TcOg5rFBpO3g,3226
  omlish/funcs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- omlish/funcs/genmachine.py,sha256=jvctDOJd3X-S2_8C83sgWYz2llYlyhHN7P19-WsanOs,2506
+ omlish/funcs/genmachine.py,sha256=8K5fNvFwxctZcVyulOqPLhZ_6Nwh8RAN6yawiE4wFio,2606
  omlish/funcs/match.py,sha256=gMLZn1enNiFvQaWrQubY300M1BrmdKWzeePihBS7Ywc,6153
  omlish/funcs/pairs.py,sha256=VCkZjDmJGtR76BsejsHNfb4TcpHCtkkmak-zWDFchAo,3904
  omlish/funcs/pipes.py,sha256=E7Sz8Aj8ke_vCs5AMNwg1I36kRdHVGTnzxVQaDyn43U,2490
@@ -847,9 +852,9 @@ omlish/typedvalues/holder.py,sha256=ZTnHiw-K38ciOBLEdwgrltr7Xp8jjEs_0Lp69DH-G-o,
  omlish/typedvalues/marshal.py,sha256=hWHRLcrGav7lvXJDtb9bNI0ickl4SKPQ6F4BbTpqw3A,4219
  omlish/typedvalues/reflect.py,sha256=Ih1YgU-srUjsvBn_P7C66f73_VCvcwqE3ffeBnZBgt4,674
  omlish/typedvalues/values.py,sha256=ym46I-q2QJ_6l4UlERqv3yj87R-kp8nCKMRph0xQ3UA,1307
- omlish-0.0.0.dev305.dist-info/licenses/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
- omlish-0.0.0.dev305.dist-info/METADATA,sha256=xv6KDihpZe0TCjJOQAtfs6Yld4oOHnwCwhVuHNkBYR4,4416
- omlish-0.0.0.dev305.dist-info/WHEEL,sha256=wXxTzcEDnjrTwFYjLPcsW_7_XihufBwmpiBeiXNBGEA,91
- omlish-0.0.0.dev305.dist-info/entry_points.txt,sha256=Lt84WvRZJskWCAS7xnQGZIeVWksprtUHj0llrvVmod8,35
- omlish-0.0.0.dev305.dist-info/top_level.txt,sha256=pePsKdLu7DvtUiecdYXJ78iO80uDNmBlqe-8hOzOmfs,7
- omlish-0.0.0.dev305.dist-info/RECORD,,
+ omlish-0.0.0.dev307.dist-info/licenses/LICENSE,sha256=B_hVtavaA8zCYDW99DYdcpDLKz1n3BBRjZrcbv8uG8c,1451
+ omlish-0.0.0.dev307.dist-info/METADATA,sha256=UgKZG7ukJ09NYqteFnD8DCdWblMH5cosVoyewHJxBTY,4416
+ omlish-0.0.0.dev307.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
+ omlish-0.0.0.dev307.dist-info/entry_points.txt,sha256=Lt84WvRZJskWCAS7xnQGZIeVWksprtUHj0llrvVmod8,35
+ omlish-0.0.0.dev307.dist-info/top_level.txt,sha256=pePsKdLu7DvtUiecdYXJ78iO80uDNmBlqe-8hOzOmfs,7
+ omlish-0.0.0.dev307.dist-info/RECORD,,
omlish-0.0.0.dev305.dist-info/WHEEL → omlish-0.0.0.dev307.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.1.0)
+ Generator: setuptools (80.3.1)
  Root-Is-Purelib: true
  Tag: py3-none-any
 
File without changes
File without changes