omlish 0.0.0.dev80__py3-none-any.whl → 0.0.0.dev82__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. omlish/__about__.py +4 -4
  2. omlish/dataclasses/impl/__init__.py +8 -0
  3. omlish/dataclasses/impl/params.py +3 -0
  4. omlish/dataclasses/impl/slots.py +61 -7
  5. omlish/formats/json/__init__.py +8 -1
  6. omlish/formats/json/backends/__init__.py +7 -0
  7. omlish/formats/json/backends/base.py +38 -0
  8. omlish/formats/json/backends/default.py +10 -0
  9. omlish/formats/json/backends/jiter.py +25 -0
  10. omlish/formats/json/backends/orjson.py +46 -2
  11. omlish/formats/json/backends/std.py +39 -0
  12. omlish/formats/json/backends/ujson.py +49 -0
  13. omlish/formats/json/cli.py +125 -31
  14. omlish/formats/json/consts.py +22 -0
  15. omlish/formats/json/encoding.py +17 -0
  16. omlish/formats/json/json.py +9 -39
  17. omlish/formats/json/render.py +49 -24
  18. omlish/formats/json/stream/__init__.py +0 -0
  19. omlish/formats/json/stream/build.py +113 -0
  20. omlish/formats/json/stream/lex.py +285 -0
  21. omlish/formats/json/stream/parse.py +244 -0
  22. omlish/formats/json/stream/render.py +119 -0
  23. omlish/genmachine.py +56 -10
  24. omlish/lang/resources.py +6 -1
  25. omlish/marshal/base.py +2 -0
  26. omlish/marshal/newtypes.py +24 -0
  27. omlish/marshal/standard.py +4 -0
  28. omlish/reflect/__init__.py +1 -0
  29. omlish/reflect/types.py +6 -1
  30. {omlish-0.0.0.dev80.dist-info → omlish-0.0.0.dev82.dist-info}/METADATA +5 -5
  31. {omlish-0.0.0.dev80.dist-info → omlish-0.0.0.dev82.dist-info}/RECORD +35 -24
  32. {omlish-0.0.0.dev80.dist-info → omlish-0.0.0.dev82.dist-info}/LICENSE +0 -0
  33. {omlish-0.0.0.dev80.dist-info → omlish-0.0.0.dev82.dist-info}/WHEEL +0 -0
  34. {omlish-0.0.0.dev80.dist-info → omlish-0.0.0.dev82.dist-info}/entry_points.txt +0 -0
  35. {omlish-0.0.0.dev80.dist-info → omlish-0.0.0.dev82.dist-info}/top_level.txt +0 -0
omlish/formats/json/stream/parse.py ADDED
@@ -0,0 +1,244 @@
+ import typing as ta
+
+ from .... import lang
+ from ....genmachine import GenMachine
+ from .lex import SCALAR_VALUE_TYPES
+ from .lex import VALUE_TOKEN_KINDS
+ from .lex import ScalarValue
+ from .lex import Token
+
+
+ ##
+
+
+ class BeginObject(lang.Marker):
+     pass
+
+
+ class Key(ta.NamedTuple):
+     key: str
+
+
+ class EndObject(lang.Marker):
+     pass
+
+
+ class BeginArray(lang.Marker):
+     pass
+
+
+ class EndArray(lang.Marker):
+     pass
+
+
+ JsonStreamParserEvent: ta.TypeAlias = ta.Union[  # noqa
+     type[BeginObject],
+     Key,
+     type[EndObject],
+
+     type[BeginArray],
+     type[EndArray],
+
+     ScalarValue,
+ ]
+
+
+ class JsonStreamParserEvents(lang.Namespace):
+     BeginObject = BeginObject
+     Key = Key
+     EndObject = EndObject
+
+     BeginArray = BeginArray
+     EndArray = EndArray
+
+
+ ##
+
+
+ def yield_parser_events(obj: ta.Any) -> ta.Generator[JsonStreamParserEvent, None, None]:
+     if isinstance(obj, SCALAR_VALUE_TYPES):
+         yield obj  # type: ignore
+
+     elif isinstance(obj, ta.Mapping):
+         yield BeginObject
+         for k, v in obj.items():
+             yield Key(k)
+             yield from yield_parser_events(v)
+         yield EndObject
+
+     elif isinstance(obj, ta.Sequence):
+         yield BeginArray
+         for v in obj:
+             yield from yield_parser_events(v)
+         yield EndArray
+
+     else:
+         raise TypeError(obj)
+
+
+ ##
+
+
+ class JsonStreamObject(list):
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}({super().__repr__()})'
+
+
+ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
+     def __init__(self) -> None:
+         super().__init__(self._do_value())
+
+         self._stack: list[ta.Literal['OBJECT', 'KEY', 'ARRAY']] = []
+
+     #
+
+     def _emit_event(self, v):
+         if not self._stack:
+             return ((v,), self._do_value())
+
+         tt = self._stack[-1]
+         if tt == 'KEY':
+             self._stack.pop()
+             if not self._stack:
+                 raise self.StateError
+
+             tt2 = self._stack[-1]
+             if tt2 == 'OBJECT':
+                 return ((v,), self._do_after_pair())
+
+             else:
+                 raise self.StateError
+
+         elif tt == 'ARRAY':
+             return ((v,), self._do_after_element())
+
+         else:
+             raise self.StateError
+
+     #
+
+     def _do_value(self):
+         try:
+             tok = yield None
+         except GeneratorExit:
+             if self._stack:
+                 raise self.StateError from None
+             else:
+                 raise
+
+         if tok.kind in VALUE_TOKEN_KINDS:
+             y, r = self._emit_event(tok.value)
+             yield y
+             return r
+
+         elif tok.kind == 'LBRACE':
+             y, r = self._emit_begin_object()
+             yield y
+             return r
+
+         elif tok.kind == 'LBRACKET':
+             y, r = self._emit_begin_array()
+             yield y
+             return r
+
+         elif tok.kind == 'RBRACKET':
+             y, r = self._emit_end_array()
+             yield y
+             return r
+
+         else:
+             raise self.StateError
+
+     #
+
+     def _emit_begin_object(self):
+         self._stack.append('OBJECT')
+         return ((BeginObject,), self._do_object_body())
+
+     def _emit_end_object(self):
+         if not self._stack:
+             raise self.StateError
+
+         tt = self._stack.pop()
+         if tt != 'OBJECT':
+             raise self.StateError
+
+         return self._emit_event(EndObject)
+
+     def _do_object_body(self):
+         try:
+             tok = yield None
+         except GeneratorExit:
+             raise self.StateError from None
+
+         if tok.kind == 'STRING':
+             k = tok.value
+
+             try:
+                 tok = yield None
+             except GeneratorExit:
+                 raise self.StateError from None
+             if tok.kind != 'COLON':
+                 raise self.StateError
+
+             yield (Key(k),)
+             self._stack.append('KEY')
+             return self._do_value()
+
+         elif tok.kind == 'RBRACE':
+             y, r = self._emit_end_object()
+             yield y
+             return r
+
+         else:
+             raise self.StateError
+
+     def _do_after_pair(self):
+         try:
+             tok = yield None
+         except GeneratorExit:
+             raise self.StateError from None
+
+         if tok.kind == 'COMMA':
+             return self._do_object_body()
+
+         elif tok.kind == 'RBRACE':
+             y, r = self._emit_end_object()
+             yield y
+             return r
+
+         else:
+             raise self.StateError
+
+     #
+
+     def _emit_begin_array(self):
+         self._stack.append('ARRAY')
+         return ((BeginArray,), self._do_value())
+
+     def _emit_end_array(self):
+         if not self._stack:
+             raise self.StateError
+
+         tt = self._stack.pop()
+         if tt != 'ARRAY':
+             raise self.StateError
+
+         return self._emit_event(EndArray)
+
+     def _do_after_element(self):
+         try:
+             tok = yield None
+         except GeneratorExit:
+             raise self.StateError from None
+
+         if tok.kind == 'COMMA':
+             return self._do_value()
+
+         elif tok.kind == 'RBRACKET':
+             y, r = self._emit_end_array()
+             yield y
+             return r
+
+         else:
+             raise self.StateError
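For orientation, a minimal consumer of the event stream produced by the new module, assuming omlish 0.0.0.dev82 is installed; the token-driven JsonStreamParser path is omitted here since the lex module is not shown above.

    from omlish.formats.json.stream.parse import BeginArray
    from omlish.formats.json.stream.parse import BeginObject
    from omlish.formats.json.stream.parse import EndArray
    from omlish.formats.json.stream.parse import EndObject
    from omlish.formats.json.stream.parse import Key
    from omlish.formats.json.stream.parse import yield_parser_events

    # Walk a plain Python object and print the flat event stream a streaming
    # consumer would see.
    obj = {'name': 'omlish', 'tags': ['json', 'stream'], 'version': 82}
    for ev in yield_parser_events(obj):
        if ev is BeginObject or ev is EndObject or ev is BeginArray or ev is EndArray:
            print(ev.__name__)        # marker classes are yielded as-is
        elif isinstance(ev, Key):
            print(f'Key({ev.key!r})')
        else:
            print(f'scalar: {ev!r}')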
omlish/formats/json/stream/render.py ADDED
@@ -0,0 +1,119 @@
+ import json
+ import typing as ta
+
+ from ..render import AbstractJsonRenderer
+ from ..render import JsonRendererOut
+ from .build import JsonObjectBuilder
+ from .parse import BeginArray
+ from .parse import BeginObject
+ from .parse import EndArray
+ from .parse import EndObject
+ from .parse import JsonStreamParserEvent
+ from .parse import Key
+
+
+ ##
+
+
+ class StreamJsonRenderer(AbstractJsonRenderer[ta.Iterable[JsonStreamParserEvent]]):
+     def __init__(
+             self,
+             out: JsonRendererOut,
+             opts: AbstractJsonRenderer.Options = AbstractJsonRenderer.Options(),
+     ) -> None:
+         if opts.sort_keys:
+             raise TypeError('Not yet implemented')
+
+         super().__init__(out, opts)
+
+         self._stack: list[tuple[ta.Literal['OBJECT', 'ARRAY'], int]] = []
+         self._builder: JsonObjectBuilder | None = None
+
+     def _render_value(
+             self,
+             o: ta.Any,
+             state: AbstractJsonRenderer.State = AbstractJsonRenderer.State.VALUE,
+     ) -> None:
+         if self._opts.style is not None:
+             pre, post = self._opts.style(o, state)
+             self._write(pre)
+         else:
+             post = None
+
+         if o is None or isinstance(o, bool):
+             self._write(self._literals[o])
+
+         elif isinstance(o, (str, int, float)):
+             self._write(json.dumps(o))
+
+         else:
+             raise TypeError(o)
+
+         if post:
+             self._write(post)
+
+     def _render(self, e: JsonStreamParserEvent) -> None:
+         if e != EndArray and self._stack and (tt := self._stack[-1])[0] == 'ARRAY':
+             if tt[1]:
+                 self._write(self._comma)
+             self._write_indent()
+
+             self._stack[-1] = ('ARRAY', tt[1] + 1)
+
+         #
+
+         if e is None or isinstance(e, (str, int, float, bool)):
+             self._render_value(e)
+
+         #
+
+         elif e is BeginObject:
+             self._stack.append(('OBJECT', 0))
+             self._write('{')
+             self._level += 1
+
+         elif isinstance(e, Key):
+             if not self._stack or (tt := self._stack.pop())[0] != 'OBJECT':
+                 raise Exception
+
+             if tt[1]:
+                 self._write(self._comma)
+             self._write_indent()
+             self._render_value(e.key, AbstractJsonRenderer.State.KEY)
+             self._write(self._colon)
+
+             self._stack.append(('OBJECT', tt[1] + 1))
+
+         elif e is EndObject:
+             if not self._stack or (tt := self._stack.pop())[0] != 'OBJECT':
+                 raise Exception
+
+             self._level -= 1
+             if tt[1]:
+                 self._write_indent()
+             self._write('}')
+
+         #
+
+         elif e is BeginArray:
+             self._stack.append(('ARRAY', 0))
+             self._write('[')
+             self._level += 1
+
+         elif e is EndArray:
+             if not self._stack or (tt := self._stack.pop())[0] != 'ARRAY':
+                 raise Exception
+
+             self._level -= 1
+             if tt[1]:
+                 self._write_indent()
+             self._write(']')
+
+         #
+
+         else:
+             raise TypeError(e)
+
+     def render(self, events: ta.Iterable[JsonStreamParserEvent]) -> None:
+         for e in events:
+             self._render(e)
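A sketch of wiring yield_parser_events into StreamJsonRenderer. It assumes the renderer's out argument accepts any object with a write method, matching the non-streaming AbstractJsonRenderer, and the exact output spacing depends on the default Options.

    import io

    from omlish.formats.json.stream.parse import yield_parser_events
    from omlish.formats.json.stream.render import StreamJsonRenderer

    buf = io.StringIO()
    r = StreamJsonRenderer(buf)

    # Render incrementally: the events could just as well arrive from a
    # JsonStreamParser one token at a time, without the whole document in memory.
    r.render(yield_parser_events({'a': [1, 2, None], 'b': True}))
    print(buf.getvalue())  # e.g. {"a":[1,2,null],"b":true}, spacing per Options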
omlish/genmachine.py CHANGED
@@ -1,4 +1,8 @@
  """
+ TODO:
+  - feed_iter helper
+  - accept yielding outputs on transitions, *except* on initial state - add test
+
  See:
   - https://github.com/pytransitions/transitions
  """
@@ -15,10 +19,6 @@ MachineGen: ta.TypeAlias = ta.Generator[ta.Any, ta.Any, ta.Any]
  ##


- class IllegalStateError(Exception):
-     pass
-
-
  class GenMachine(ta.Generic[I, O]):
      """
      Generator-powered state machine. Generators are sent an `I` object and yield any number of `O` objects in response,
@@ -26,34 +26,80 @@ class GenMachine(ta.Generic[I, O]):
      `None` to terminate.
      """

-     def __init__(self, initial: MachineGen) -> None:
+     def __init__(self, initial: MachineGen | None = None) -> None:
          super().__init__()
+
+         if initial is None:
+             initial = self._initial_state()
+             if initial is None:
+                 raise TypeError('No initial state')
+
          self._advance(initial)

+     def _initial_state(self) -> MachineGen | None:
+         return None
+
+     _gen: MachineGen | None
+
+     def __repr__(self) -> str:
+         return f'{self.__class__.__name__}@{hex(id(self))[2:]}<{self.state}>'
+
+     #
+
      @property
      def state(self) -> str | None:
          if self._gen is not None:
              return self._gen.gi_code.co_qualname
          return None

-     def __repr__(self) -> str:
-         return f'{self.__class__.__name__}@{hex(id(self))[2:]}<{self.state}>'
+     #

-     _gen: MachineGen | None
+     @property
+     def closed(self) -> bool:
+         return self._gen is None
+
+     def close(self) -> None:
+         if self._gen is not None:
+             self._gen.close()
+             self._gen = None
+
+     def __enter__(self) -> ta.Self:
+         return self
+
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         if exc_type is None:
+             self.close()
+
+     #
+
+     class Error(Exception):
+         pass
+
+     class ClosedError(Exception):
+         pass
+
+     class StateError(Exception):
+         pass
+
+     #

      def _advance(self, gen: MachineGen) -> None:
          self._gen = gen
+
          if (n := next(self._gen)) is not None:  # noqa
-             raise IllegalStateError
+             raise GenMachine.ClosedError

      def __call__(self, i: I) -> ta.Iterable[O]:
          if self._gen is None:
-             raise IllegalStateError
+             raise GenMachine.ClosedError
+
          try:
              while (o := self._gen.send(i)) is not None:
                  yield from o
+
          except StopIteration as s:
              if s.value is None:
                  self._gen = None
                  return None
+
              self._advance(s.value)
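A toy subclass illustrating the reworked GenMachine protocol: state generators first yield None, then yield iterables of outputs for each input sent in, and either return the next state generator or return None to close. The machine below is hypothetical, not part of the package.

    from omlish.genmachine import GenMachine


    class UpperMachine(GenMachine[str, str]):
        """Upper-cases each input string; an empty string closes the machine."""

        def _initial_state(self):  # picked up by the new no-arg __init__
            return self._do_run()

        def _do_run(self):
            while True:
                s = yield None       # receive the next input
                if not s:
                    return None      # returning None terminates the machine
                yield (s.upper(),)   # outputs are yielded as iterables of O


    m = UpperMachine()
    print(list(m('hi')))   # ['HI']
    print(list(m('')))     # []
    print(m.closed)        # True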
omlish/lang/resources.py CHANGED
@@ -1,14 +1,19 @@
+ import dataclasses as dc
  import functools
  import importlib.resources
  import os.path
  import typing as ta


- class RelativeResource(ta.NamedTuple):
+ @dc.dataclass(frozen=True)
+ class RelativeResource:
      name: str
      is_file: bool
      read_bytes: ta.Callable[[], bytes]

+     def read_text(self, encoding: str = 'utf-8') -> str:
+         return self.read_bytes().decode(encoding)
+

  def get_relative_resources(
      path: str = '',
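read_text is a thin convenience over the existing read_bytes callable; a minimal hand-constructed example (the resource name and payload are made up).

    from omlish.lang.resources import RelativeResource

    res = RelativeResource(
        name='greeting.txt',   # hypothetical resource name
        is_file=True,
        read_bytes=lambda: 'héllo\n'.encode('utf-8'),
    )

    print(res.read_text())           # decodes with utf-8 by default
    print(res.read_text('latin-1'))  # or any explicit encoding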
omlish/marshal/base.py CHANGED
@@ -6,6 +6,8 @@ TODO:
   - streaming? Start/EndObject, etc..
   - lang.Marker - class name, handle type[Foo]
    - can't disambiguate from str - can't coexist in bare union
+  - factories being free MatchFns does more harm than good - in practice these are such big guns you want to write a
+    class body if only ceremonially

  See:
   - https://github.com/python-attrs/cattrs
omlish/marshal/newtypes.py ADDED
@@ -0,0 +1,24 @@
+ from .. import check
+ from .. import reflect as rfl
+ from .base import MarshalContext
+ from .base import Marshaler
+ from .base import MarshalerFactory
+ from .base import UnmarshalContext
+ from .base import Unmarshaler
+ from .base import UnmarshalerFactory
+
+
+ class NewtypeMarshalerFactory(MarshalerFactory):
+     def guard(self, ctx: MarshalContext, rty: rfl.Type) -> bool:
+         return isinstance(rty, rfl.NewType)
+
+     def fn(self, ctx: MarshalContext, rty: rfl.Type) -> Marshaler:
+         return ctx.make(check.isinstance(rty, rfl.NewType).ty)
+
+
+ class NewtypeUnmarshalerFactory(UnmarshalerFactory):
+     def guard(self, ctx: UnmarshalContext, rty: rfl.Type) -> bool:
+         return isinstance(rty, rfl.NewType)
+
+     def fn(self, ctx: UnmarshalContext, rty: rfl.Type) -> Unmarshaler:
+         return ctx.make(check.isinstance(rty, rfl.NewType).ty)
omlish/marshal/standard.py CHANGED
@@ -21,6 +21,8 @@ from .mappings import MappingMarshalerFactory
  from .mappings import MappingUnmarshalerFactory
  from .maybes import MaybeMarshalerFactory
  from .maybes import MaybeUnmarshalerFactory
+ from .newtypes import NewtypeMarshalerFactory
+ from .newtypes import NewtypeUnmarshalerFactory
  from .numbers import NUMBERS_MARSHALER_FACTORY
  from .numbers import NUMBERS_UNMARSHALER_FACTORY
  from .optionals import OptionalMarshalerFactory
@@ -38,6 +40,7 @@ from .uuids import UUID_UNMARSHALER_FACTORY

  STANDARD_MARSHALER_FACTORIES: list[MarshalerFactory] = [
      PRIMITIVE_MARSHALER_FACTORY,
+     NewtypeMarshalerFactory(),
      OptionalMarshalerFactory(),
      PrimitiveUnionMarshalerFactory(),
      DataclassMarshalerFactory(),
@@ -68,6 +71,7 @@ def new_standard_marshaler_factory() -> MarshalerFactory:

  STANDARD_UNMARSHALER_FACTORIES: list[UnmarshalerFactory] = [
      PRIMITIVE_UNMARSHALER_FACTORY,
+     NewtypeUnmarshalerFactory(),
      OptionalUnmarshalerFactory(),
      PrimitiveUnionUnmarshalerFactory(),
      DataclassUnmarshalerFactory(),
omlish/reflect/__init__.py CHANGED
@@ -33,6 +33,7 @@ from .types import (  # noqa
      TYPES,
      Type,
      Union,
+     get_newtype_supertype,
      get_orig_bases,
      get_orig_class,
      get_params,
omlish/reflect/types.py CHANGED
@@ -107,6 +107,10 @@ def get_orig_class(obj: ta.Any) -> ta.Any:
      return obj.__orig_class__  # noqa


+ def get_newtype_supertype(obj: ta.Any) -> ta.Any:
+     return obj.__supertype__
+
+
  ##


@@ -164,6 +168,7 @@ class Generic:
  @dc.dataclass(frozen=True)
  class NewType:
      obj: ta.Any
+     ty: Type


  @dc.dataclass(frozen=True)
@@ -243,7 +248,7 @@ class Reflector:
              return Union(frozenset(self.type(a) for a in ta.get_args(obj)))

          if isinstance(obj, ta.NewType):  # noqa
-             return NewType(obj)
+             return NewType(obj, get_newtype_supertype(obj))

          if (
              is_simple_generic_alias_type(oty) or
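get_newtype_supertype simply reads __supertype__, and the reflected NewType record now carries the reflected supertype in its new ty field; for example:

    import typing as ta

    from omlish.reflect import get_newtype_supertype

    UserId = ta.NewType('UserId', int)

    print(get_newtype_supertype(UserId))  # <class 'int'>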
{omlish-0.0.0.dev80.dist-info → omlish-0.0.0.dev82.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: omlish
- Version: 0.0.0.dev80
+ Version: 0.0.0.dev82
  Summary: omlish
  Author: wrmsr
  License: BSD-3-Clause
@@ -19,6 +19,7 @@ Requires-Dist: greenlet ~=3.1 ; extra == 'all'
  Requires-Dist: trio ~=0.27 ; extra == 'all'
  Requires-Dist: trio-asyncio ~=0.15 ; extra == 'all'
  Requires-Dist: lz4 ~=4.3 ; extra == 'all'
+ Requires-Dist: python-snappy ~=0.7 ; extra == 'all'
  Requires-Dist: zstd ~=1.5 ; extra == 'all'
  Requires-Dist: asttokens ~=2.4 ; extra == 'all'
  Requires-Dist: executing ~=2.1 ; extra == 'all'
@@ -37,11 +38,10 @@ Requires-Dist: pg8000 ~=1.31 ; extra == 'all'
  Requires-Dist: pymysql ~=1.1 ; extra == 'all'
  Requires-Dist: aiomysql ~=0.2 ; extra == 'all'
  Requires-Dist: aiosqlite ~=0.20 ; extra == 'all'
+ Requires-Dist: asyncpg ~=0.30 ; extra == 'all'
  Requires-Dist: apsw ~=3.46 ; extra == 'all'
  Requires-Dist: duckdb ~=1.1 ; extra == 'all'
  Requires-Dist: pytest ~=8.0 ; extra == 'all'
- Requires-Dist: python-snappy ~=0.7 ; (python_version < "3.13") and extra == 'all'
- Requires-Dist: asyncpg ~=0.30 ; (python_version < "3.13") and extra == 'all'
  Requires-Dist: sqlean.py ~=3.45 ; (python_version < "3.13") and extra == 'all'
  Provides-Extra: async
  Requires-Dist: anyio ~=4.6 ; extra == 'async'
@@ -51,8 +51,8 @@ Requires-Dist: trio ~=0.27 ; extra == 'async'
  Requires-Dist: trio-asyncio ~=0.15 ; extra == 'async'
  Provides-Extra: compress
  Requires-Dist: lz4 ~=4.3 ; extra == 'compress'
+ Requires-Dist: python-snappy ~=0.7 ; extra == 'compress'
  Requires-Dist: zstd ~=1.5 ; extra == 'compress'
- Requires-Dist: python-snappy ~=0.7 ; (python_version < "3.13") and extra == 'compress'
  Provides-Extra: diag
  Requires-Dist: asttokens ~=2.4 ; extra == 'diag'
  Requires-Dist: executing ~=2.1 ; extra == 'diag'
@@ -85,9 +85,9 @@ Requires-Dist: pg8000 ~=1.31 ; extra == 'sqldrivers'
  Requires-Dist: pymysql ~=1.1 ; extra == 'sqldrivers'
  Requires-Dist: aiomysql ~=0.2 ; extra == 'sqldrivers'
  Requires-Dist: aiosqlite ~=0.20 ; extra == 'sqldrivers'
+ Requires-Dist: asyncpg ~=0.30 ; extra == 'sqldrivers'
  Requires-Dist: apsw ~=3.46 ; extra == 'sqldrivers'
  Requires-Dist: duckdb ~=1.1 ; extra == 'sqldrivers'
- Requires-Dist: asyncpg ~=0.30 ; (python_version < "3.13") and extra == 'sqldrivers'
  Requires-Dist: sqlean.py ~=3.45 ; (python_version < "3.13") and extra == 'sqldrivers'
  Provides-Extra: testing
  Requires-Dist: pytest ~=8.0 ; extra == 'testing'