omlish 0.0.0.dev437__py3-none-any.whl → 0.0.0.dev439__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (102)
  1. omlish/__about__.py +2 -2
  2. omlish/dataclasses/impl/api/classes/make.py +1 -1
  3. omlish/dataclasses/tools/static.py +1 -1
  4. omlish/formats/json/stream/__init__.py +5 -3
  5. omlish/formats/json/stream/building.py +2 -2
  6. omlish/formats/json/stream/lexing.py +187 -42
  7. omlish/formats/json/stream/parsing.py +31 -9
  8. omlish/formats/json/stream/rendering.py +6 -6
  9. omlish/formats/json/stream/utils.py +106 -33
  10. omlish/formats/json5/literals.py +7 -4
  11. omlish/formats/json5/parsing.py +33 -79
  12. omlish/formats/json5/stream.py +45 -50
  13. omlish/http/all.py +59 -53
  14. omlish/inject/__init__.py +1 -0
  15. omlish/iterators/__init__.py +2 -0
  16. omlish/iterators/transforms.py +204 -0
  17. omlish/lang/classes/bindable.py +1 -1
  18. omlish/lang/classes/restrict.py +8 -0
  19. omlish/lite/inject.py +1 -0
  20. omlish/lite/marshal.py +1 -0
  21. omlish/reflect/types.py +2 -2
  22. omlish/sql/queries/_marshal.py +1 -1
  23. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/METADATA +2 -2
  24. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/RECORD +28 -101
  25. omlish/formats/json5/Json5.g4 +0 -168
  26. omlish/formats/json5/_antlr/Json5Lexer.py +0 -354
  27. omlish/formats/json5/_antlr/Json5Listener.py +0 -79
  28. omlish/formats/json5/_antlr/Json5Parser.py +0 -617
  29. omlish/formats/json5/_antlr/Json5Visitor.py +0 -52
  30. omlish/formats/json5/_antlr/__init__.py +0 -0
  31. omlish/text/antlr/__init__.py +0 -3
  32. omlish/text/antlr/_runtime/BufferedTokenStream.py +0 -305
  33. omlish/text/antlr/_runtime/CommonTokenFactory.py +0 -64
  34. omlish/text/antlr/_runtime/CommonTokenStream.py +0 -90
  35. omlish/text/antlr/_runtime/FileStream.py +0 -30
  36. omlish/text/antlr/_runtime/InputStream.py +0 -90
  37. omlish/text/antlr/_runtime/IntervalSet.py +0 -183
  38. omlish/text/antlr/_runtime/LICENSE.txt +0 -28
  39. omlish/text/antlr/_runtime/LL1Analyzer.py +0 -176
  40. omlish/text/antlr/_runtime/Lexer.py +0 -332
  41. omlish/text/antlr/_runtime/ListTokenSource.py +0 -147
  42. omlish/text/antlr/_runtime/Parser.py +0 -583
  43. omlish/text/antlr/_runtime/ParserInterpreter.py +0 -173
  44. omlish/text/antlr/_runtime/ParserRuleContext.py +0 -189
  45. omlish/text/antlr/_runtime/PredictionContext.py +0 -632
  46. omlish/text/antlr/_runtime/Recognizer.py +0 -150
  47. omlish/text/antlr/_runtime/RuleContext.py +0 -230
  48. omlish/text/antlr/_runtime/StdinStream.py +0 -14
  49. omlish/text/antlr/_runtime/Token.py +0 -158
  50. omlish/text/antlr/_runtime/TokenStreamRewriter.py +0 -258
  51. omlish/text/antlr/_runtime/Utils.py +0 -36
  52. omlish/text/antlr/_runtime/__init__.py +0 -2
  53. omlish/text/antlr/_runtime/_all.py +0 -24
  54. omlish/text/antlr/_runtime/_pygrun.py +0 -174
  55. omlish/text/antlr/_runtime/atn/ATN.py +0 -135
  56. omlish/text/antlr/_runtime/atn/ATNConfig.py +0 -162
  57. omlish/text/antlr/_runtime/atn/ATNConfigSet.py +0 -215
  58. omlish/text/antlr/_runtime/atn/ATNDeserializationOptions.py +0 -27
  59. omlish/text/antlr/_runtime/atn/ATNDeserializer.py +0 -449
  60. omlish/text/antlr/_runtime/atn/ATNSimulator.py +0 -50
  61. omlish/text/antlr/_runtime/atn/ATNState.py +0 -267
  62. omlish/text/antlr/_runtime/atn/ATNType.py +0 -20
  63. omlish/text/antlr/_runtime/atn/LexerATNSimulator.py +0 -573
  64. omlish/text/antlr/_runtime/atn/LexerAction.py +0 -301
  65. omlish/text/antlr/_runtime/atn/LexerActionExecutor.py +0 -146
  66. omlish/text/antlr/_runtime/atn/ParserATNSimulator.py +0 -1664
  67. omlish/text/antlr/_runtime/atn/PredictionMode.py +0 -502
  68. omlish/text/antlr/_runtime/atn/SemanticContext.py +0 -333
  69. omlish/text/antlr/_runtime/atn/Transition.py +0 -271
  70. omlish/text/antlr/_runtime/atn/__init__.py +0 -4
  71. omlish/text/antlr/_runtime/dfa/DFA.py +0 -136
  72. omlish/text/antlr/_runtime/dfa/DFASerializer.py +0 -76
  73. omlish/text/antlr/_runtime/dfa/DFAState.py +0 -129
  74. omlish/text/antlr/_runtime/dfa/__init__.py +0 -4
  75. omlish/text/antlr/_runtime/error/DiagnosticErrorListener.py +0 -111
  76. omlish/text/antlr/_runtime/error/ErrorListener.py +0 -75
  77. omlish/text/antlr/_runtime/error/ErrorStrategy.py +0 -712
  78. omlish/text/antlr/_runtime/error/Errors.py +0 -176
  79. omlish/text/antlr/_runtime/error/__init__.py +0 -4
  80. omlish/text/antlr/_runtime/tree/Chunk.py +0 -33
  81. omlish/text/antlr/_runtime/tree/ParseTreeMatch.py +0 -121
  82. omlish/text/antlr/_runtime/tree/ParseTreePattern.py +0 -75
  83. omlish/text/antlr/_runtime/tree/ParseTreePatternMatcher.py +0 -377
  84. omlish/text/antlr/_runtime/tree/RuleTagToken.py +0 -53
  85. omlish/text/antlr/_runtime/tree/TokenTagToken.py +0 -50
  86. omlish/text/antlr/_runtime/tree/Tree.py +0 -194
  87. omlish/text/antlr/_runtime/tree/Trees.py +0 -114
  88. omlish/text/antlr/_runtime/tree/__init__.py +0 -2
  89. omlish/text/antlr/_runtime/xpath/XPath.py +0 -278
  90. omlish/text/antlr/_runtime/xpath/XPathLexer.py +0 -98
  91. omlish/text/antlr/_runtime/xpath/__init__.py +0 -4
  92. omlish/text/antlr/delimit.py +0 -109
  93. omlish/text/antlr/dot.py +0 -41
  94. omlish/text/antlr/errors.py +0 -14
  95. omlish/text/antlr/input.py +0 -96
  96. omlish/text/antlr/parsing.py +0 -54
  97. omlish/text/antlr/runtime.py +0 -102
  98. omlish/text/antlr/utils.py +0 -38
  99. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/WHEEL +0 -0
  100. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/entry_points.txt +0 -0
  101. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/licenses/LICENSE +0 -0
  102. {omlish-0.0.0.dev437.dist-info → omlish-0.0.0.dev439.dist-info}/top_level.txt +0 -0
omlish/formats/json/stream/utils.py CHANGED
@@ -23,63 +23,136 @@ TODO:
  - Names and values separated by = or => instead of :.
  - Name/value pairs separated by ; instead of ,.
 """
-import dataclasses as dc
+import itertools
 import typing as ta
 
 from .... import lang
 from .building import JsonValueBuilder
+from .errors import JsonStreamError
 from .lexing import JsonStreamLexer
+from .lexing import Token
+from .parsing import Event
 from .parsing import JsonStreamParser
 
 
 ##
 
 
-@dc.dataclass(kw_only=True)
 class JsonStreamValueParser(lang.ExitStacked):
-    include_raw: bool = False
-    yield_object_lists: bool = False
+    class Machinery(ta.NamedTuple):
+        lex: JsonStreamLexer
+        parse: JsonStreamParser
+        build: JsonValueBuilder
 
-    json5: bool = False
+    def __init__(self, m: Machinery) -> None:
+        super().__init__()
 
-    #
+        self._m = m
 
-    _lex: JsonStreamLexer = dc.field(init=False)
-    _parse: JsonStreamParser = dc.field(init=False)
-    _build: JsonValueBuilder = dc.field(init=False)
+    #
 
     def _enter_contexts(self) -> None:
-        self._lex = JsonStreamLexer(
-            include_raw=self.include_raw,
-            allow_comments=self.json5,
-            allow_single_quotes=self.json5,
-        )
+        self._enter_context(self._m.lex)
+        self._enter_context(self._m.parse)
+
+    def feed(self, i: ta.Iterable[str]) -> ta.Iterator[ta.Any]:
+        for c in i:
+            for t in self._m.lex(c):
+                for e in self._m.parse(t):
+                    for v in self._m.build(e):  # noqa
+                        yield v
+
+    #
+
+    @classmethod
+    def parse_values(
+            cls,
+            m: Machinery,
+            i: ta.Iterable[str],
+    ) -> ta.Iterator[ta.Any]:
+        with cls(m) as p:
+            yield from p.feed(itertools.chain(i, ['']))
+
+    @classmethod
+    def parse_one_value(
+            cls,
+            m: Machinery,
+            i: ta.Iterable[str],
+    ) -> ta.Any:
+        with cls(m) as p:
+            return next(p.feed(itertools.chain(i, [''])))
+
+    @classmethod
+    def parse_exactly_one_value(
+            cls,
+            m: Machinery,
+            i: ta.Iterable[str],
+    ) -> ta.Any:
+        r: ta.Any
+        r = not_set = object()
+        with cls(m) as p:
+            for v in p.feed(itertools.chain(i, [''])):
+                if r is not_set:
+                    r = v
+                else:
+                    raise JsonStreamError('Unexpected input')
+        if r is not_set:
+            raise JsonStreamError('No value')
+        return r
+
+
+##
+
 
-        self._parse = JsonStreamParser()
+class DebugJsonStreamValueParser(JsonStreamValueParser):
+    def __init__(self, m: JsonStreamValueParser.Machinery) -> None:
+        super().__init__(m)
 
-        self._build = JsonValueBuilder(
-            yield_object_lists=self.yield_object_lists,
-        )
+        self._chars: list[str] = []
+        self._tokens: list[Token] = []
+        self._events: list[Event] = []
+        self._values: list[ta.Any] = []
 
     def feed(self, i: ta.Iterable[str]) -> ta.Iterator[ta.Any]:
         for c in i:
-            for t in self._lex(c):
-                for e in self._parse(t):
-                    for v in self._build(e):  # noqa
+            self._chars.append(c)
+            for t in self._m.lex(c):
+                self._tokens.append(t)
+                for e in self._m.parse(t):
+                    self._events.append(e)
+                    for v in self._m.build(e):
+                        self._values.append(v)
                         yield v
 
 
-def stream_parse_values(
-        i: ta.Iterable[str],
-        **kwargs: ta.Any,
-) -> ta.Generator[ta.Any]:
-    with JsonStreamValueParser(**kwargs) as p:
-        yield from p.feed(i)
+##
+
+
+def make_machinery(
+        *,
+        include_raw: bool = False,
+        yield_object_lists: bool = False,
+) -> JsonStreamValueParser.Machinery:
+    return JsonStreamValueParser.Machinery(
+        JsonStreamLexer(
+            include_raw=include_raw,
+        ),
+
+        JsonStreamParser(),
+
+        JsonValueBuilder(
+            yield_object_lists=yield_object_lists,
+        ),
+    )
+
+
+def stream_parse_values(i: ta.Iterable[str], **kwargs: ta.Any) -> ta.Iterator[ta.Any]:
+    return JsonStreamValueParser.parse_values(make_machinery(**kwargs), i)
+
+
+def stream_parse_one_value(i: ta.Iterable[str], **kwargs: ta.Any) -> ta.Any:
+    return JsonStreamValueParser.parse_one_value(make_machinery(**kwargs), i)
 
 
-def stream_parse_one_value(
-        i: ta.Iterable[str],
-        **kwargs: ta.Any,
-) -> ta.Any:
-    with JsonStreamValueParser(**kwargs) as p:
-        return next(p.feed(i))
+def stream_parse_exactly_one_value(i: ta.Iterable[str], **kwargs: ta.Any) -> ta.Any:
+    return JsonStreamValueParser.parse_exactly_one_value(make_machinery(**kwargs), i)
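
For orientation, a minimal usage sketch of the refactored omlish/formats/json/stream/utils.py API shown above. The input strings are invented examples, and the output comments assume the builder yields ordinary Python containers and that multiple concatenated top-level values are supported (which is what parse_values suggests):

    from omlish.formats.json.stream.utils import JsonStreamValueParser
    from omlish.formats.json.stream.utils import make_machinery
    from omlish.formats.json.stream.utils import stream_parse_values

    # The module-level helpers keep their old one-call shape; each call builds a fresh Machinery.
    for v in stream_parse_values('[1, 2, 3] {"a": true}'):
        print(v)  # -> [1, 2, 3] then {'a': True}

    # Or build the machinery explicitly and drive the classmethods directly.
    m = make_machinery(yield_object_lists=False)
    print(JsonStreamValueParser.parse_exactly_one_value(m, '{"a": 1}'))  # -> {'a': 1}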
omlish/formats/json5/literals.py CHANGED
@@ -124,7 +124,10 @@ def parse_string_literal(s: str) -> str:
 def parse_number_literal(s: str) -> int | float:
     s = s.lower()
 
-    if 'x' in s:
-        return int(s, 16)
-    else:
-        return float(s)
+    try:
+        if 'x' in s:
+            return int(s, 16)
+        else:
+            return float(s)
+    except ValueError as e:
+        raise Json5Error from e
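
The practical effect, sketched below under the assumption that these are the only two code paths (hex text goes through int(s, 16), everything else through float()), is that malformed numeric text now surfaces as Json5Error instead of a bare ValueError:

    from omlish.formats.json5.errors import Json5Error
    from omlish.formats.json5.literals import parse_number_literal

    assert parse_number_literal('0x1A') == 26  # hexadecimal -> int
    assert parse_number_literal('+.5') == 0.5  # explicit sign / bare decimal point -> float

    try:
        parse_number_literal('12abc')          # neither hex nor a valid float
    except Json5Error:
        pass                                   # previously this escaped as a raw ValueError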
omlish/formats/json5/parsing.py CHANGED
@@ -1,100 +1,54 @@
-# ruff: noqa: N802 N803
 import typing as ta
 
-from ...text import antlr
-from ._antlr.Json5Lexer import Json5Lexer  # type: ignore
-from ._antlr.Json5Parser import Json5Parser  # type: ignore
-from ._antlr.Json5Visitor import Json5Visitor  # type: ignore
+from ..json.stream.errors import JsonStreamError
+from ..json.stream.utils import DebugJsonStreamValueParser
+from ..json.stream.utils import JsonStreamValueParser
 from .errors import Json5Error
-from .literals import LITERAL_VALUES
-from .literals import parse_number_literal
-from .literals import parse_string_literal
+from .stream import make_machinery
 
 
 ##
 
 
-class Json5ParseVisitor(antlr.parsing.StandardParseTreeVisitor, Json5Visitor):
-    def visitArr(self, ctx: Json5Parser.ArrContext):
-        return [self.visit(e) for e in ctx.value()]
+def parse(
+        buf: str,
+        *,
+        debug: bool = False,
+) -> ta.Any:
+    m = make_machinery()
 
-    def visitKey(self, ctx: Json5Parser.KeyContext):
-        if (s := ctx.STRING()) is not None:
-            return parse_string_literal(s.getText())
+    if debug:
+        vc: type[JsonStreamValueParser] = DebugJsonStreamValueParser
+    else:
+        vc = JsonStreamValueParser
 
-        elif (i := ctx.IDENTIFIER()) is not None:
-            return parse_string_literal(''.join(['"', i.getText(), '"']))
-
-        elif (l := ctx.LITERAL()) is not None:
-            return LITERAL_VALUES[l.getText()]
-
-        elif (n := ctx.NUMERIC_LITERAL()) is not None:
-            return n.getText()
-
-        else:
-            raise RuntimeError(ctx)
-
-    def visitNumber(self, ctx: Json5Parser.NumberContext):
-        return parse_number_literal(ctx.getText())
-
-    def visitObj(self, ctx: Json5Parser.ObjContext):
-        dct: dict[ta.Any, ta.Any] = {}
-        for pair in ctx.pair():
-            key, value = self.visit(pair)
-            dct[key] = value
-        return dct
-
-    def visitPair(self, ctx: Json5Parser.PairContext):
-        key = self.visit(ctx.key())
-        value = self.visit(ctx.value())
-        return (key, value)
-
-    def visitValue(self, ctx: Json5Parser.ValueContext):
-        if (s := ctx.STRING()) is not None:
-            return parse_string_literal(s.getText())
-
-        elif (n := ctx.LITERAL()) is not None:
-            return LITERAL_VALUES[n.getText()]
-
-        else:
-            return super().visitChildren(ctx)
-
-
-def _make_parser(buf: str) -> Json5Parser:
-    return antlr.parsing.make_parser(
-        buf,
-        Json5Lexer,
-        Json5Parser,
-        silent_errors=True,
-    )
-
-
-def parse(buf: str) -> ta.Any:
     try:
-        root = _make_parser(buf).json5()
+        return vc.parse_exactly_one_value(m, buf)
+
+    except Json5Error:
+        raise
 
-    except antlr.errors.ParseError as e:
+    except JsonStreamError as e:
         raise Json5Error from e
 
-    if antlr.parsing.is_eof_context(root):
-        raise Json5Error('Empty input')
 
-    visitor = Json5ParseVisitor()
-    return visitor.visit(root)
+def parse_many(
+        buf: str,
+        *,
+        debug: bool = False,
+) -> ta.Iterator[ta.Any]:
+    m = make_machinery()
 
+    if debug:
+        vc: type[JsonStreamValueParser] = DebugJsonStreamValueParser
+    else:
+        vc = JsonStreamValueParser
 
-def parse_many(buf: str) -> ta.Generator[ta.Any]:
     try:
-        parser = _make_parser(buf)
-
-        while True:
-            if parser.getInputStream().LT(1).type == antlr.runtime.Token.EOF:
-                break
-
-            value = parser.value()
+        yield from vc.parse_values(m, buf)
 
-            visitor = Json5ParseVisitor()
-            yield visitor.visit(value)
+    except Json5Error:
+        raise
 
-    except antlr.errors.ParseError as e:
+    except JsonStreamError as e:
         raise Json5Error from e
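
With the ANTLR visitor gone, json5.parsing is a thin wrapper over the shared streaming machinery. A rough usage sketch of the two entry points shown above; the JSON5 document is an invented example, and the features it exercises are the ones the stream.py hunk below enables:

    from omlish.formats.json5.parsing import parse
    from omlish.formats.json5.parsing import parse_many

    doc = '''
    {
        // comments, unquoted keys, hex numbers, and trailing commas are JSON5 features
        answer: 0x2A,
        quoted: 'single quotes work too',
    }
    '''

    print(parse(doc))                          # exactly one value, else Json5Error
    for v in parse_many('{"a": 1} {"b": 2}'):  # zero or more concatenated values
        print(v)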
omlish/formats/json5/stream.py CHANGED
@@ -1,82 +1,77 @@
 """
-TODO:
-
 Objects:
- - Object keys may be an ECMAScript 5.1 IdentifierName.
- - Objects may have a single trailing comma.
+ + Object keys may be an ECMAScript 5.1 IdentifierName.
+ + Objects may have a single trailing comma.
 Arrays:
- - Arrays may have a single trailing comma.
+ + Arrays may have a single trailing comma.
 Strings:
- - Strings may be single quoted.
- - Strings may span multiple lines by escaping new line characters.
- - Strings may include character escapes.
+ + Strings may be single quoted.
+ + Strings may span multiple lines by escaping new line characters.
+ + Strings may include character escapes.
 Numbers:
- - Numbers may be hexadecimal.
- - Numbers may have a leading or trailing decimal point.
- - Numbers may be IEEE 754 positive infinity, negative infinity, and NaN.
- - Numbers may begin with an explicit plus sign.
+ + Numbers may be hexadecimal.
+ + Numbers may have a leading or trailing decimal point.
+ + Numbers may be IEEE 754 positive infinity, negative infinity, and NaN.
+ + Numbers may begin with an explicit plus sign.
 Comments:
  + Single and multi-line comments are allowed.
 White Space:
- - Additional white space characters are allowed.
+ + Additional white space characters are allowed.
 """
-import dataclasses as dc
 import typing as ta
 
-from ... import lang
 from ..json.stream.building import JsonValueBuilder
 from ..json.stream.lexing import JsonStreamLexer
 from ..json.stream.parsing import JsonStreamParser
+from ..json.stream.utils import JsonStreamValueParser
+from .literals import parse_number_literal
 from .literals import parse_string_literal
 
 
 ##
 
 
-@dc.dataclass(kw_only=True)
-class JsonStreamValueParser(lang.ExitStacked):
-    include_raw: bool = False
-    yield_object_lists: bool = False
-
-    #
+def make_machinery(
+        *,
+        include_raw: bool = False,
+        yield_object_lists: bool = False,
+) -> JsonStreamValueParser.Machinery:
+    return JsonStreamValueParser.Machinery(
+        JsonStreamLexer(
+            include_raw=include_raw,
 
-    _lex: JsonStreamLexer = dc.field(init=False)
-    _parse: JsonStreamParser = dc.field(init=False)
-    _build: JsonValueBuilder = dc.field(init=False)
+            allow_extended_space=True,
 
-    def _enter_contexts(self) -> None:
-        self._lex = JsonStreamLexer(
-            include_raw=self.include_raw,
             allow_comments=True,
+
             allow_single_quotes=True,
             string_literal_parser=parse_string_literal,
-        )
 
-        self._parse = JsonStreamParser()
+            allow_extended_number_literals=True,
+            number_literal_parser=parse_number_literal,
+
+            allow_extended_idents=True,
+        ),
+
+        JsonStreamParser(
+            allow_trailing_commas=True,
+
+            allow_extended_idents=True,
+        ),
+
+        JsonValueBuilder(
+            yield_object_lists=yield_object_lists,
+        ),
+    )
 
-        self._build = JsonValueBuilder(
-            yield_object_lists=self.yield_object_lists,
-        )
 
-    def feed(self, i: ta.Iterable[str]) -> ta.Iterator[ta.Any]:
-        for c in i:
-            for t in self._lex(c):
-                for e in self._parse(t):
-                    for v in self._build(e):  # noqa
-                        yield v
+def stream_parse_values(i: ta.Iterable[str], **kwargs: ta.Any) -> ta.Iterator[ta.Any]:
+    return JsonStreamValueParser.parse_values(make_machinery(**kwargs), i)
 
 
-def stream_parse_values(
-        i: ta.Iterable[str],
-        **kwargs: ta.Any,
-) -> ta.Generator[ta.Any]:
-    with JsonStreamValueParser(**kwargs) as p:
-        yield from p.feed(i)
+def stream_parse_one_value(i: ta.Iterable[str], **kwargs: ta.Any) -> ta.Any:
+    return JsonStreamValueParser.parse_one_value(make_machinery(**kwargs), i)
 
 
-def stream_parse_one_value(
-        i: ta.Iterable[str],
-        **kwargs: ta.Any,
-) -> ta.Any:
-    with JsonStreamValueParser(**kwargs) as p:
-        return next(p.feed(i))
+def stream_parse_exactly_one_value(i: ta.Iterable[str], **kwargs: ta.Any) -> ta.Any:
+    return JsonStreamValueParser.parse_exactly_one_value(make_machinery(**kwargs), i)
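
Because json5.stream now only assembles a JSON5-flavored Machinery and defers to the shared JsonStreamValueParser, incremental parsing looks the same as for plain JSON. A small sketch; any iterable of str can be fed, and here the invented text is consumed one character at a time:

    from omlish.formats.json5.stream import stream_parse_values

    text = "{key: 0x2A, /* comment */ quoted: 'ok',}"
    for v in stream_parse_values(iter(text)):
        print(v)  # assuming the hex and ident handling configured above: {'key': 42, 'quoted': 'ok'}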
omlish/http/all.py CHANGED
@@ -1,53 +1,59 @@
-from . import consts  # noqa
-
-from .clients import (  # noqa
-    BaseHttpResponse,
-    HttpClient,
-    HttpClientError,
-    HttpRequest,
-    HttpResponse,
-    HttpxHttpClient,
-    StreamHttpResponse,
-    UrllibHttpClient,
-    client,
-    close_response,
-    closing_response,
-    read_response,
-    request,
-)
-
-from .cookies import (  # noqa
-    CookieTooBigError,
-    dump_cookie,
-    parse_cookie,
-)
-
-from .dates import (  # noqa
-    http_date,
-    parse_date,
-)
-
-from .encodings import (  # noqa
-    latin1_decode,
-    latin1_encode,
-)
-
-from .headers import (  # noqa
-    CanHttpHeaders,
-    HttpHeaders,
-    headers,
-)
-
-from .json import (  # noqa
-    JSON_TAGGER,
-    JsonTag,
-    JsonTagger,
-    json_dumps,
-    json_loads,
-)
-
-from .multipart import (  # noqa
-    MultipartData,
-    MultipartEncoder,
-    MultipartField,
-)
+from .. import lang as _lang
+
+
+with _lang.auto_proxy_init(globals()):
+    ##
+
+    from . import consts  # noqa
+
+    from .clients import (  # noqa
+        BaseHttpResponse,
+        HttpClient,
+        HttpClientError,
+        HttpRequest,
+        HttpResponse,
+        HttpxHttpClient,
+        StreamHttpResponse,
+        UrllibHttpClient,
+        client,
+        close_response,
+        closing_response,
+        read_response,
+        request,
+    )
+
+    from .cookies import (  # noqa
+        CookieTooBigError,
+        dump_cookie,
+        parse_cookie,
+    )
+
+    from .dates import (  # noqa
+        http_date,
+        parse_date,
+    )
+
+    from .encodings import (  # noqa
+        latin1_decode,
+        latin1_encode,
+    )
+
+    from .headers import (  # noqa
+        CanHttpHeaders,
+        HttpHeaders,
+        headers,
+    )
+
+    from .json import (  # noqa
+        JSON_TAGGER,
+        JsonTag,
+        JsonTagger,
+        json_dumps,
+        json_loads,
+    )
+
+    from .multipart import (  # noqa
+        MultipartData,
+        MultipartEncoder,
+        MultipartField,
+    )
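
This brings omlish/http/all.py in line with the lang.auto_proxy_init pattern already used by omlish/inject/__init__.py and omlish/iterators/__init__.py (see the hunks below). A sketch of the apparent intent, assuming auto_proxy_init defers the wrapped imports until an exported name is first accessed:

    import omlish.http.all as hu  # importing the facade is cheap under that assumption

    print(hu.HttpHeaders)         # first attribute access resolves the proxied import of .headers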
omlish/inject/__init__.py CHANGED
@@ -123,6 +123,7 @@ with _lang.auto_proxy_init(globals()):
     )
 
     from .providers import (  # noqa
+        AsyncFnProvider,
         ConstProvider,
         CtorProvider,
         FnProvider,
omlish/iterators/__init__.py CHANGED
@@ -22,6 +22,8 @@ with _lang.auto_proxy_init(globals()):
         unzip,
     )
 
+    from . import transforms as tf  # noqa
+
     from .unique import (  # noqa
         UniqueItem,
         UniqueIterator,