omlish 0.0.0.dev484__py3-none-any.whl → 0.0.0.dev506__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- omlish/CODESTYLE.md +345 -0
- omlish/README.md +199 -0
- omlish/__about__.py +12 -5
- omlish/_check.cc +209 -0
- omlish/check.py +11 -0
- omlish/dataclasses/__init__.py +4 -0
- omlish/dataclasses/impl/concerns/frozen.py +4 -1
- omlish/dataclasses/impl/generation/plans.py +2 -17
- omlish/dataclasses/impl/generation/processor.py +2 -2
- omlish/dataclasses/impl/processing/driving.py +13 -1
- omlish/dataclasses/tools/replace.py +27 -0
- omlish/diag/_pycharm/runhack.py +1 -1
- omlish/dispatch/functions.py +1 -1
- omlish/formats/json/stream/lexing.py +13 -5
- omlish/formats/json/stream/parsing.py +1 -1
- omlish/inject/README.md +430 -0
- omlish/inject/__init__.py +20 -11
- omlish/inject/_dataclasses.py +1545 -1383
- omlish/inject/binder.py +7 -4
- omlish/inject/eagers.py +2 -4
- omlish/inject/elements.py +4 -0
- omlish/inject/helpers/late.py +76 -0
- omlish/inject/{managed.py → helpers/managed.py} +37 -34
- omlish/inject/impl/elements.py +7 -4
- omlish/inject/impl/injector.py +14 -26
- omlish/inject/impl/inspect.py +0 -8
- omlish/inject/impl/origins.py +1 -0
- omlish/inject/impl/privates.py +2 -6
- omlish/inject/impl/providers.py +0 -4
- omlish/inject/impl/scopes.py +14 -18
- omlish/inject/inspect.py +10 -1
- omlish/inject/multis.py +0 -3
- omlish/inject/scopes.py +7 -5
- omlish/io/buffers.py +35 -8
- omlish/lang/__init__.py +10 -0
- omlish/lang/classes/simple.py +2 -1
- omlish/lang/iterables.py +6 -0
- omlish/lang/objects.py +13 -0
- omlish/lang/outcomes.py +1 -1
- omlish/lang/recursion.py +1 -1
- omlish/lang/sequences.py +33 -0
- omlish/lifecycles/README.md +30 -0
- omlish/lifecycles/__init__.py +87 -13
- omlish/lifecycles/_dataclasses.py +1388 -0
- omlish/lifecycles/base.py +178 -64
- omlish/lifecycles/contextmanagers.py +113 -4
- omlish/lifecycles/controller.py +150 -87
- omlish/lifecycles/injection.py +143 -0
- omlish/lifecycles/listeners.py +56 -0
- omlish/lifecycles/managed.py +142 -0
- omlish/lifecycles/manager.py +218 -93
- omlish/lifecycles/states.py +2 -0
- omlish/lifecycles/transitions.py +3 -0
- omlish/lifecycles/unwrap.py +57 -0
- omlish/lite/maybes.py +7 -0
- omlish/lite/typing.py +33 -0
- omlish/logs/_amalg.py +1 -1
- omlish/logs/all.py +36 -11
- omlish/logs/asyncs.py +73 -0
- omlish/logs/base.py +101 -12
- omlish/logs/bisync.py +99 -0
- omlish/logs/contexts.py +4 -1
- omlish/logs/lists.py +125 -0
- omlish/logs/modules.py +19 -1
- omlish/logs/std/loggers.py +6 -1
- omlish/logs/std/noisy.py +11 -9
- omlish/logs/{standard.py → std/standard.py} +3 -4
- omlish/logs/utils.py +16 -1
- omlish/marshal/_dataclasses.py +813 -813
- omlish/reflect/__init__.py +43 -26
- omlish/reflect/ops.py +10 -1
- omlish/specs/jmespath/_dataclasses.py +597 -597
- omlish/specs/jsonschema/keywords/_dataclasses.py +244 -244
- omlish/sql/__init__.py +24 -5
- omlish/sql/api/dbapi.py +1 -1
- omlish/sql/dbapi/__init__.py +15 -0
- omlish/sql/{dbapi.py → dbapi/drivers.py} +2 -2
- omlish/sql/queries/__init__.py +3 -0
- omlish/testing/pytest/plugins/asyncs/plugin.py +2 -0
- omlish/text/docwrap/cli.py +5 -0
- omlish/typedvalues/_collection.cc +500 -0
- omlish/typedvalues/collection.py +159 -62
- omlish/typedvalues/generic.py +5 -4
- omlish/typedvalues/values.py +6 -0
- {omlish-0.0.0.dev484.dist-info → omlish-0.0.0.dev506.dist-info}/METADATA +14 -9
- {omlish-0.0.0.dev484.dist-info → omlish-0.0.0.dev506.dist-info}/RECORD +92 -77
- omlish/lifecycles/abstract.py +0 -86
- /omlish/inject/{impl → helpers}/proxy.py +0 -0
- /omlish/sql/{abc.py → dbapi/abc.py} +0 -0
- {omlish-0.0.0.dev484.dist-info → omlish-0.0.0.dev506.dist-info}/WHEEL +0 -0
- {omlish-0.0.0.dev484.dist-info → omlish-0.0.0.dev506.dist-info}/entry_points.txt +0 -0
- {omlish-0.0.0.dev484.dist-info → omlish-0.0.0.dev506.dist-info}/licenses/LICENSE +0 -0
- {omlish-0.0.0.dev484.dist-info → omlish-0.0.0.dev506.dist-info}/top_level.txt +0 -0
omlish/typedvalues/collection.py
CHANGED
```diff
@@ -1,3 +1,8 @@
+# ruff: noqa: UP007
+"""
+TODO:
+ - cext _init_typed_values_collection
+"""
 import typing as ta
 
 from .. import check
@@ -19,12 +24,128 @@ UniqueTypedValueT = ta.TypeVar('UniqueTypedValueT', bound='UniqueTypedValue')
 
 
 @dc.dataclass()
-class DuplicateUniqueTypedValueError(Exception
-    cls: type
-    new:
-    old:
+class DuplicateUniqueTypedValueError(Exception):
+    cls: type
+    new: TypedValue
+    old: TypedValue
 
 
+##
+
+
+def _init_typed_values_collection(
+        *tvs: TypedValueT,
+        override: bool = False,
+        check_type: type | tuple[type, ...] | None = None,
+) -> tuple[
+    tuple[TypedValueT, ...],
+    dict[type[TypedValueT], TypedValueT | tuple[TypedValueT, ...]],
+    dict[type[TypedValueT], TypedValueT | tuple[TypedValueT, ...]],
+]:
+    if not tvs:
+        return ((), {}, {})
+
+    # Either a non-unique TypedValue or a tuple of the form (unique_tv_cls, tv, unique_lst, idx_in_unique_lst). Notably,
+    # this intermediate list has the 'opposite' form of the returned collections: where the output dicts have a scalar
+    # tv for unique types and a sequence of tv's for non-unique types, this has scalar values for non-unique types and a
+    # tuple (heterogeneous, however) for unique types.
+    tmp_lst: list[ta.Union[
+        TypedValueT,
+        tuple[
+            type,
+            TypedValueT,
+            list[TypedValueT],
+            int,
+        ],
+    ]] = []
+
+    # When override is False duplicate unique values raises early. When override is True, however, last-in-wins. This
+    # could probably rely on dict insertion order preservation and just overwrite in-place, but it's intentionally done
+    # explicitly: preservation of tv ordering in all aspects is crucial, and retention of some intermediates eases
+    # debugging and error reporting.
+    unique_dct: dict[type, list[TypedValueT]] = {}
+
+    for tv in tvs:
+        if check_type is not None:
+            if not isinstance(tv, check_type):
+                raise TypeError(tv)
+
+        if isinstance(tv, UniqueTypedValue):
+            unique_tv_cls = tv._unique_typed_value_cls  # noqa
+
+            if not override:
+                try:
+                    exu = unique_dct[unique_tv_cls]
+                except KeyError:
+                    pass
+                else:
+                    raise DuplicateUniqueTypedValueError(unique_tv_cls, tv, exu[0])
+
+            unique_lst = unique_dct.setdefault(unique_tv_cls, [])
+            unique_lst.append(tv)
+
+            tmp_lst.append((unique_tv_cls, tv, unique_lst, len(unique_lst)))
+
+        elif isinstance(tv, TypedValue):
+            tmp_lst.append(tv)
+
+        else:
+            raise TypeError(tv)
+
+    # The output list with input order preserved and absent of overridden uniques.
+    lst: list[TypedValueT] = []
+
+    # This dict has the expected form: scalar tv's for unique types, and an accumulating list for non-unique types.
+    tmp_dct: dict[type, TypedValueT | list[TypedValueT]] = {}
+
+    for obj in tmp_lst:
+        # Unique type
+        if isinstance(obj, tuple):
+            unique_tv_cls, tv, unique_lst, idx = obj
+
+            # Last-in-wins
+            if idx == len(unique_lst):
+                lst.append(tv)
+                tmp_dct[unique_tv_cls] = tv
+
+        else:
+            tv = obj
+            lst.append(tv)
+            tmp_dct.setdefault(type(tv), []).append(tv)  # type: ignore[union-attr]
+
+    # This is the 'canonical' output dict: scalar tv's for unique types keyed by their unique type, and homogenous
+    # tuples of tv's keyed by their instance type for non-unique types.
+    dct: dict[type, TypedValueT | tuple[TypedValueT, ...]] = {
+        k: tuple(v) if isinstance(v, list) else v
+        for k, v in tmp_dct.items()
+    }
+
+    # This is the secondary output dict: the contents of previous dict in addition to entries of unique tv's keyed by
+    # their instance type. Notably, for unique tv's in which their unique type *is* their instance type (which is
+    # perfectly fine) this will squash together duplicate (k, v) pairs, which is also perfectly fine.
+    dct2: dict[type, TypedValueT | tuple[TypedValueT, ...]] = {
+        **dct,
+        **{type(v): v for v in dct.values() if isinstance(v, UniqueTypedValue)},
+    }
+
+    return (tuple(lst), dct, dct2)
+
+
+##
+
+
+try:
+    from . import _collection  # type: ignore
+except ImportError:
+    pass
+else:
+    _init_typed_values_collection = _collection.init_typed_values_collection  # noqa
+
+
+##
+
+
+@ta.final
 class TypedValues(
     TypedValuesAccessor[TypedValueT],
     lang.Final,
@@ -36,79 +157,55 @@ class TypedValues(
             override: bool = False,
             check_type: type | tuple[type, ...] | None = None,
     ) -> None:
-
-
-        if tvs:
-            tmp: list = []
-            udct: dict = {}
-            for tv in tvs:
-                if check_type is not None:
-                    check.isinstance(tv, check_type)
-                if isinstance(tv, UniqueTypedValue):
-                    utvc = tv._unique_typed_value_cls  # noqa
-                    if not override:
-                        try:
-                            exu = udct[utvc]
-                        except KeyError:
-                            pass
-                        else:
-                            raise DuplicateUniqueTypedValueError(utvc, tv, check.single(exu))
-                    ulst = udct.setdefault(utvc, [])
-                    ulst.append(tv)
-                    tmp.append((utvc, tv, ulst, len(ulst)))
-                elif isinstance(tv, TypedValue):
-                    tmp.append(tv)
-                else:
-                    raise TypeError(tv)
-
-            lst: list = []
-            dct: dict = {}
-            for obj in tmp:
-                if isinstance(obj, tuple):
-                    utvc, tv, ulst, idx = obj
-                    if idx == len(ulst):
-                        lst.append(tv)
-                        dct[utvc] = tv
-                else:
-                    tv = obj
-                    lst.append(tv)
-                    dct.setdefault(type(tv), []).append(tv)
-
-            tup = tuple(lst)
-            dct = {
-                k: tuple(v) if isinstance(v, list) else v
-                for k, v in dct.items()
-            }
-            dct2 = {
-                **dct,
-                **{type(v): v for v in dct.values() if isinstance(v, UniqueTypedValue)},
-            }
+        self._tup, self._dct, self._dct2 = _init_typed_values_collection(*tvs, override=override, check_type=check_type)  # noqa
 
-
-            tup = ()
-            dct = {}
-            dct2 = {}
+    _tup: tuple[TypedValueT, ...]
 
-
-
-
+    # For non unique types, a map from tv instance type to a tuple of instances of that type. For unique tv types, a map
+    # from tv unique type to the tv for that unique tv type.
+    _dct: dict[type[TypedValueT], TypedValueT | tuple[TypedValueT, ...]]
+
+    # The contents of the previous dict in addition to entries from unique tv's keyed by their instance type.
+    _dct2: dict[type[TypedValueT], TypedValueT | tuple[TypedValueT, ...]]
 
     #
 
     def without(self, *tys: type) -> ta.Iterator[TypedValueT]:
         for o in self._tup:
-            if isinstance(o, tys):
+            if tys and isinstance(o, tys):
                 continue
             yield o
 
     #
 
-    def update(self, *tvs, override: bool = False) -> 'TypedValues':
-        return TypedValues(*self._tup, *tvs, override=override)
-
     def discard(self, *tys: type) -> 'TypedValues':
+        nl = list(self.without(*tys))
+
+        if len(nl) == len(self._tup):
+            return self
+
         return TypedValues(*self.without(*tys))
 
+    def update(
+            self,
+            *tvs,
+            discard: ta.Iterable[type] | None = None,
+            override: bool = False,
+    ) -> 'TypedValues':
+        if not tvs:
+            return self
+
+        n = TypedValues(
+            *(self.discard(*discard) if discard else self._tup),
+            *tvs,
+            override=override,
+        )
+
+        if lang.seqs_identical(self._tup, n._tup):
+            return self
+
+        return n
+
     #
 
     def __repr__(self) -> str:
```
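The helper factored out above (and optionally swapped for the `_collection` C extension) is an order-preserving grouping pass: non-unique values accumulate into per-type tuples, unique values are keyed by their family's root class, and duplicate uniques either raise `DuplicateUniqueTypedValueError` or, with `override=True`, resolve last-in-wins. The standalone sketch below mirrors those documented semantics; the `Timeout`/`Tag` classes and the `_unique_root` helper are illustrative stand-ins, not omlish code.

```python
from dataclasses import dataclass


class TypedValue:  # stand-in for omlish.typedvalues' base class
    pass


class UniqueTypedValue(TypedValue):  # stand-in: at most one per family in a collection
    pass


def _unique_root(cls: type) -> type:
    # Simplified analogue of _unique_typed_value_cls: the class directly below
    # UniqueTypedValue in the MRO keys the whole family.
    for c in cls.__mro__:
        if UniqueTypedValue in c.__bases__:
            return c
    raise TypeError(cls)


@dataclass(frozen=True)
class Timeout(UniqueTypedValue):  # unique: later values conflict or override
    seconds: float


@dataclass(frozen=True)
class Tag(TypedValue):  # non-unique: any number may coexist
    name: str


class DuplicateError(Exception):
    pass


def collect(*tvs: TypedValue, override: bool = False):
    """Order-preserving grouping with optional last-in-wins override."""
    seen: dict[type, TypedValue] = {}
    out: list[TypedValue] = []
    for tv in tvs:
        if isinstance(tv, UniqueTypedValue):
            key = _unique_root(type(tv))
            if key in seen:
                if not override:
                    raise DuplicateError(key, tv, seen[key])
                out.remove(seen[key])  # last-in wins: drop the earlier duplicate
            seen[key] = tv
        out.append(tv)
    grouped: dict[type, object] = {}
    for tv in out:
        if isinstance(tv, UniqueTypedValue):
            grouped[_unique_root(type(tv))] = tv  # scalar, keyed by the unique root
        else:
            grouped.setdefault(type(tv), []).append(tv)  # accumulate by instance type
    dct = {k: tuple(v) if isinstance(v, list) else v for k, v in grouped.items()}
    return tuple(out), dct


tup, dct = collect(Tag('a'), Timeout(1.0), Timeout(2.0), Tag('b'), override=True)
assert tup == (Tag('a'), Timeout(2.0), Tag('b'))
assert dct == {Tag: (Tag('a'), Tag('b')), Timeout: Timeout(2.0)}
```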
omlish/typedvalues/generic.py
CHANGED
```diff
@@ -18,10 +18,11 @@ class TypedValueGeneric(lang.Abstract, ta.Generic[TypedValueT]):
     def __init_subclass__(cls, **kwargs: ta.Any) -> None:
         super().__init_subclass__(**kwargs)
 
-        if '_typed_value_type' in cls.__dict__:
-            return
-
         g_mro = rfl.ALIAS_UPDATING_GENERIC_SUBSTITUTION.generic_mro(cls)
         g_tvg = check.single(gb for gb in g_mro if isinstance(gb, rfl.Generic) and gb.cls is TypedValueGeneric)
         tvt = check.single(g_tvg.args)
-
+
+        if '_typed_value_type' not in cls.__dict__:
+            cls._typed_value_type = tvt
+        else:
+            check.equal(tvt, cls._typed_value_type)
```
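The change above inverts the old early-return: the typed-value type argument is now always resolved from the generic MRO, and then either recorded on the subclass or checked against an explicitly declared `_typed_value_type`. A minimal standalone approximation of that derive-or-verify pattern follows; it uses `typing.get_args` on `__orig_bases__` rather than omlish's `rfl` machinery and only handles direct parameterization.

```python
import typing as ta

T = ta.TypeVar('T')


class Box(ta.Generic[T]):
    """Toy analogue of TypedValueGeneric: resolve T for each subclass."""

    _value_type: ta.ClassVar[type]

    def __init_subclass__(cls, **kwargs: ta.Any) -> None:
        super().__init_subclass__(**kwargs)

        # Rough substitute for the generic-MRO walk: read the type argument
        # off a directly parameterized Box base, if there is one.
        for base in getattr(cls, '__orig_bases__', ()):
            if ta.get_origin(base) is Box:
                (vt,) = ta.get_args(base)
                break
        else:
            return

        # Derive-or-verify, mirroring the new __init_subclass__ logic:
        if '_value_type' not in cls.__dict__:
            cls._value_type = vt
        elif cls.__dict__['_value_type'] is not vt:
            raise TypeError((vt, cls._value_type))


class IntBox(Box[int]):
    pass


assert IntBox._value_type is int
```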
omlish/typedvalues/values.py
CHANGED
```diff
@@ -22,6 +22,12 @@ _UNIQUE_BASES: set[type[TypedValue]] = set()
 
 
 class UniqueTypedValue(TypedValue, lang.Abstract):
+    """
+    Inheritance of this abstract class forms the root / key of a family of mutually exclusive TypedValues.
+
+    The immediately inheriting class may or not be Abstract or Final.
+    """
+
     _unique_typed_value_cls: ta.ClassVar[type[TypedValue]]
 
     def __init_subclass__(cls, **kwargs: ta.Any) -> None:
```
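The new docstring describes the keying scheme visible in the surrounding lines: the first class to directly inherit `UniqueTypedValue` becomes `_unique_typed_value_cls` for every deeper subclass, so all members of that family exclude one another within a collection. A simplified sketch of that mechanism (it ignores the `_UNIQUE_BASES` registry and whatever Abstract/Final handling the real module performs):

```python
import typing as ta


class TypedValue:
    pass


class UniqueTypedValue(TypedValue):
    _unique_typed_value_cls: ta.ClassVar[type]

    def __init_subclass__(cls, **kwargs: ta.Any) -> None:
        super().__init_subclass__(**kwargs)

        # The immediate subclass becomes the key shared by its whole family.
        if UniqueTypedValue in cls.__bases__:
            cls._unique_typed_value_cls = cls


class Compression(UniqueTypedValue):  # root / key of the family
    pass


class Gzip(Compression):
    pass


class Zstd(Compression):
    pass


# Gzip and Zstd share a key, so a collection can hold at most one of them.
assert Gzip._unique_typed_value_cls is Compression
assert Zstd._unique_typed_value_cls is Compression
```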
{omlish-0.0.0.dev484.dist-info → omlish-0.0.0.dev506.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omlish
-Version: 0.0.0.dev484
+Version: 0.0.0.dev506
 Summary: omlish
 Author: wrmsr
 License-Expression: BSD-3-Clause
@@ -26,11 +26,11 @@ Requires-Dist: zstandard~=0.25; python_version < "3.14" and extra == "all"
 Requires-Dist: brotli~=1.2; extra == "all"
 Requires-Dist: asttokens~=3.0; extra == "all"
 Requires-Dist: executing~=2.2; extra == "all"
-Requires-Dist: psutil~=7.
+Requires-Dist: psutil~=7.2; extra == "all"
 Requires-Dist: orjson~=3.11; extra == "all"
 Requires-Dist: ujson~=5.11; extra == "all"
 Requires-Dist: pyyaml~=6.0; extra == "all"
-Requires-Dist: cbor2~=5.
+Requires-Dist: cbor2~=5.8; extra == "all"
 Requires-Dist: cloudpickle~=3.1; extra == "all"
 Requires-Dist: httpx[http2]~=0.28; extra == "all"
 Requires-Dist: wrapt~=2.0; extra == "all"
@@ -38,8 +38,9 @@ Requires-Dist: cryptography~=46.0; extra == "all"
 Requires-Dist: sqlalchemy[asyncio]~=2.0; extra == "all"
 Requires-Dist: pg8000~=1.31; extra == "all"
 Requires-Dist: pymysql~=1.1; extra == "all"
+Requires-Dist: snowflake-connector-python~=4.2; extra == "all"
 Requires-Dist: aiomysql~=0.3; extra == "all"
-Requires-Dist: aiosqlite~=0.
+Requires-Dist: aiosqlite~=0.22; extra == "all"
 Requires-Dist: asyncpg~=0.31; extra == "all"
 Requires-Dist: apsw~=3.51; extra == "all"
 Requires-Dist: sqlean.py~=3.50; extra == "all"
@@ -68,12 +69,12 @@ Requires-Dist: brotli~=1.2; extra == "compress"
 Provides-Extra: diag
 Requires-Dist: asttokens~=3.0; extra == "diag"
 Requires-Dist: executing~=2.2; extra == "diag"
-Requires-Dist: psutil~=7.
+Requires-Dist: psutil~=7.2; extra == "diag"
 Provides-Extra: formats
 Requires-Dist: orjson~=3.11; extra == "formats"
 Requires-Dist: ujson~=5.11; extra == "formats"
 Requires-Dist: pyyaml~=6.0; extra == "formats"
-Requires-Dist: cbor2~=5.
+Requires-Dist: cbor2~=5.8; extra == "formats"
 Requires-Dist: cloudpickle~=3.1; extra == "formats"
 Provides-Extra: http
 Requires-Dist: httpx[http2]~=0.28; extra == "http"
@@ -86,8 +87,9 @@ Requires-Dist: sqlalchemy[asyncio]~=2.0; extra == "sqlalchemy"
 Provides-Extra: sqldrivers
 Requires-Dist: pg8000~=1.31; extra == "sqldrivers"
 Requires-Dist: pymysql~=1.1; extra == "sqldrivers"
+Requires-Dist: snowflake-connector-python~=4.2; extra == "sqldrivers"
 Requires-Dist: aiomysql~=0.3; extra == "sqldrivers"
-Requires-Dist: aiosqlite~=0.
+Requires-Dist: aiosqlite~=0.22; extra == "sqldrivers"
 Requires-Dist: asyncpg~=0.31; extra == "sqldrivers"
 Requires-Dist: apsw~=3.51; extra == "sqldrivers"
 Requires-Dist: sqlean.py~=3.50; extra == "sqldrivers"
@@ -167,7 +169,7 @@ dependencies of any kind**.
 - An optional [metaclass](https://github.com/wrmsr/omlish/blob/master/omlish/dataclasses/metaclass) which removes the
   need for re-decorating subclasses (with support for inheritance of dataclass parameters like `frozen`), and some
   basic [base classes](https://github.com/wrmsr/omlish/blob/master/omlish/dataclasses/metaclass/bases.py).
--
+- Support for ahead-of-time / build-time code generation, significantly reducing import times.
 
 The stdlib-equivalent api is exported in such a way as to appear to be direct aliases for the stdlib api itself,
 simplifying tool support.
@@ -247,6 +249,10 @@ dependencies of any kind**.
   *any-future-event-loop-impl* without having multiple fighting plugins (*[I know, I know](https://xkcd.com/927/)*).
 - **[plugins](https://github.com/wrmsr/omlish/blob/master/omlish/testing/pytest/plugins)** - Various other plugins.
 
+- **[typedvalues](https://github.com/wrmsr/omlish/blob/master/omlish/typedvalues)** - A little toolkit around 'boxed'
+  values, whose 'box' types convey more information than the bare values themselves. A rebellion against kwargs / env
+  vars / giant config objects: instead of `foo(bar=1, baz=2)`, you do `foo(Bar(1), Baz(2))`.
+
 - **[lite](https://github.com/wrmsr/omlish/blob/master/omlish/lite)** - The standard library of 'lite' code. This is the
   only package beneath `lang`, and parts of it are re-exported by it for deduplication. On top of miscellaneous
   utilities it contains a handful of independent, self-contained, significantly simplified 'lite' equivalents of some
@@ -294,7 +300,6 @@ examples are:
 - **anyio** - While lite code must use only asyncio, non-trivial async standard code prefers to be written to anyio.
 - **pytest** - What is used for all standard testing - as lite code has no dependencies of any kind its testing uses
   stdlib's [unittest](https://docs.python.org/3/library/unittest.html).
-- **wrapt** - For (optionally-enabled) injector circular proxies.
 - **sqlalchemy** - The codebase has migrated away from SQLAlchemy in favor of the internal api but it retains it as an
   optional dep to support adapting the internal api to it.
 
```