omlish-0.0.0.dev306-py3-none-any.whl → omlish-0.0.0.dev308-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,11 +35,11 @@ from .json import (  # noqa
 )

 if _ta.TYPE_CHECKING:
-    from .render import (  # noqa
+    from .rendering import (  # noqa
         JsonRenderer,
     )
 else:
-    _lang.proxy_init(globals(), '.render', [
+    _lang.proxy_init(globals(), '.rendering', [
         'JsonRenderer',
     ])

@@ -1,13 +1,13 @@
 import typing as ta

-from .lex import SCALAR_VALUE_TYPES
-from .parse import BeginArray
-from .parse import BeginObject
-from .parse import EndArray
-from .parse import EndObject
-from .parse import JsonStreamObject
-from .parse import JsonStreamParserEvent
-from .parse import Key
+from .lexing import SCALAR_VALUE_TYPES
+from .parsing import BeginArray
+from .parsing import BeginObject
+from .parsing import EndArray
+from .parsing import EndObject
+from .parsing import JsonStreamObject
+from .parsing import JsonStreamParserEvent
+from .parsing import Key


 ##
@@ -40,7 +40,14 @@ ControlTokenKind: ta.TypeAlias = ta.Literal[
     'COLON',
 ]

-TokenKind: ta.TypeAlias = ValueTokenKind | ControlTokenKind
+SpaceTokenKind: ta.TypeAlias = ta.Literal['SPACE']
+
+TokenKind: ta.TypeAlias = ta.Union[  # noqa
+    ValueTokenKind,
+    ControlTokenKind,
+    SpaceTokenKind,
+]
+

 #

@@ -109,8 +116,10 @@ class JsonStreamLexer(GenMachine[str, Token]):
             self,
             *,
             include_raw: bool = False,
+            include_space: bool = False,
     ) -> None:
         self._include_raw = include_raw
+        self._include_space = include_space

         self._ofs = 0
         self._line = 1
@@ -174,6 +183,8 @@ class JsonStreamLexer(GenMachine[str, Token]):
                 return None

             if c.isspace():
+                if self._include_space:
+                    yield self._make_tok('SPACE', c, c, self.pos)
                 continue

             if c in CONTROL_TOKENS:
@@ -282,7 +293,11 @@ class JsonStreamLexer(GenMachine[str, Token]):
         if c in CONTROL_TOKENS:
             yield self._make_tok(CONTROL_TOKENS[c], c, c, pos)

-        elif not c.isspace():
+        elif c.isspace():
+            if self._include_space:
+                yield self._make_tok('SPACE', c, c, self.pos)
+
+        else:
             self._raise(f'Unexpected character after number: {c}')

         return self._do_main()
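
Note: a minimal sketch of the new include_space option, assuming the lexer is driven one character at a time through GenMachine.__call__ as shown elsewhere in this diff (driver helpers omitted):

    lexer = JsonStreamLexer(include_space=True)
    for c in '{"a": 1}':
        for tok in lexer(c):
            # with include_space=True, whitespace surfaces as 'SPACE' tokens
            # instead of being silently dropped
            print(tok.kind)
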
@@ -4,11 +4,11 @@ import typing as ta
 from .... import lang
 from ....funcs.genmachine import GenMachine
 from .errors import JsonStreamError
-from .lex import SCALAR_VALUE_TYPES
-from .lex import VALUE_TOKEN_KINDS
-from .lex import Position
-from .lex import ScalarValue
-from .lex import Token
+from .lexing import SCALAR_VALUE_TYPES
+from .lexing import VALUE_TOKEN_KINDS
+from .lexing import Position
+from .lexing import ScalarValue
+from .lexing import Token


 ##
@@ -102,6 +102,15 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):

     #

+    def _next_tok(self):
+        while True:
+            tok = yield None
+
+            if tok.kind != 'SPACE':
+                return tok
+
+    #
+
     def _emit_event(self, v):
         if not self._stack:
             return ((v,), self._do_value())
@@ -129,7 +138,7 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):

     def _do_value(self, *, must_be_present: bool = False):
         try:
-            tok = yield None
+            tok = yield from self._next_tok()
         except GeneratorExit:
             if self._stack:
                 raise JsonStreamParseError('Expected value') from None
@@ -180,7 +189,7 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):

     def _do_object_body(self, *, must_be_present: bool = False):
         try:
-            tok = yield None
+            tok = yield from self._next_tok()
         except GeneratorExit:
             raise JsonStreamParseError('Expected object body') from None

@@ -188,7 +197,7 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):
             k = tok.value

             try:
-                tok = yield None
+                tok = yield from self._next_tok()
             except GeneratorExit:
                 raise JsonStreamParseError('Expected key') from None
             if tok.kind != 'COLON':
@@ -211,7 +220,7 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):

     def _do_after_pair(self):
         try:
-            tok = yield None
+            tok = yield from self._next_tok()
         except GeneratorExit:
             raise JsonStreamParseError('Expected continuation') from None

@@ -244,7 +253,7 @@ class JsonStreamParser(GenMachine[Token, JsonStreamParserEvent]):

     def _do_after_element(self):
         try:
-            tok = yield None
+            tok = yield from self._next_tok()
         except GeneratorExit:
             raise JsonStreamParseError('Expected continuation') from None

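
Note: the parser now pulls tokens through _next_tok, so 'SPACE' tokens from a whitespace-preserving lexer are skipped transparently. A rough sketch of wiring the two machines together by hand (the package's own plumbing may differ):

    lexer = JsonStreamLexer(include_space=True)
    parser = JsonStreamParser()

    for c in '{"a": [1, 2]}':
        for tok in lexer(c):
            for ev in parser(tok):  # SPACE tokens are filtered by _next_tok
                print(ev)
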
@@ -1,14 +1,14 @@
 import io
 import typing as ta

-from ..render import AbstractJsonRenderer
+from ..rendering import AbstractJsonRenderer
 from ..types import SCALAR_TYPES
-from .parse import BeginArray
-from .parse import BeginObject
-from .parse import EndArray
-from .parse import EndObject
-from .parse import JsonStreamParserEvent
-from .parse import Key
+from .parsing import BeginArray
+from .parsing import BeginObject
+from .parsing import EndArray
+from .parsing import EndObject
+from .parsing import JsonStreamParserEvent
+from .parsing import Key


 ##
@@ -2,9 +2,9 @@ import dataclasses as dc
 import typing as ta

 from .... import lang
-from .build import JsonObjectBuilder
-from .lex import JsonStreamLexer
-from .parse import JsonStreamParser
+from .building import JsonObjectBuilder
+from .lexing import JsonStreamLexer
+from .parsing import JsonStreamParser


 ##
@@ -34,7 +34,10 @@ class GenMachine(ta.Generic[I, O]):
         if initial is None:
             raise TypeError('No initial state')

-        self._advance(initial)
+        self._gen = initial
+
+        if (n := next(self._gen)) is not None:  # noqa
+            raise GenMachine.NotStartedError

     def _initial_state(self) -> MachineGen | None:
         return None
@@ -74,33 +77,34 @@
     class Error(Exception):
         pass

-    class ClosedError(Exception):
+    class NotStartedError(Error):
         pass

-    class StateError(Exception):
+    class ClosedError(Error):
        pass

-    #
-
-    def _advance(self, gen: MachineGen) -> None:
-        self._gen = gen
+    class StateError(Error):
+        pass

-        if (n := next(self._gen)) is not None:  # noqa
-            raise GenMachine.ClosedError
+    #

     def __call__(self, i: I) -> ta.Iterable[O]:
         if self._gen is None:
             raise GenMachine.ClosedError

         gi: I | None = i
-        try:
-            while (o := self._gen.send(gi)) is not None:
-                gi = None
-                yield from o
+        while True:
+            try:
+                while (o := self._gen.send(gi)) is not None:  # type: ignore[union-attr]
+                    gi = None
+                    yield from o

-        except StopIteration as s:
-            if s.value is None:
-                self._gen = None
-                return None
+                break

-            self._advance(s.value)
+            except StopIteration as s:
+                if (sv := s.value) is None:
+                    self._gen = None
+                    return None
+
+                self._gen = sv
+                gi = None
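
Note: in the GenMachine protocol this rework targets, each state is a generator that yields None to request the next input, yields an iterable of outputs to emit, and returns the next state (or None to finish); __call__ now trampolines across states in a loop instead of recursing through _advance. A toy sketch, assuming the constructor falls back to _initial_state() when no initial generator is passed:

    class Doubler(GenMachine[int, int]):
        def _initial_state(self):
            return self._do_main()

        def _do_main(self):
            while True:
                i = yield None  # yield None to request the next input
                yield (i, i)    # yield an iterable to emit outputs

    m = Doubler()
    print(list(m(3)))  # [3, 3]
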
omlish/lang/__init__.py CHANGED
@@ -184,12 +184,11 @@ from .functions import (  # noqa
 )

 from .generators import (  # noqa
-    CoroutineGenerator,
-    Generator,
     GeneratorLike,
     GeneratorMappedIterator,
     autostart,
-    corogen,
+    capture_coroutine,
+    capture_generator,
     genmap,
     nextgen,
 )
omlish/lang/generators.py CHANGED
@@ -2,6 +2,7 @@ import abc
 import functools
 import typing as ta

+from .classes.restrict import Abstract
 from .maybes import Maybe
 from .maybes import empty
 from .maybes import just
@@ -79,15 +80,29 @@ def adapt_generator_like(gl):
 ##


-class Generator(ta.Generator[O, I, R]):
+class AbstractGeneratorCapture(Abstract, ta.Generic[O, I, R]):
     def __init__(self, g: ta.Generator[O, I, R]) -> None:
         super().__init__()
+
         self._g = g

     @property
     def g(self) -> ta.Generator[O, I, R]:
         return self._g

+    #
+
+    def close(self) -> None:
+        self._g.close()
+
+    def __enter__(self) -> ta.Self:
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self._g.close()
+
+
+class GeneratorCapture(AbstractGeneratorCapture[O, I, R], ta.Generator[O, I, R]):
     value: R

     def __iter__(self):
@@ -114,35 +129,11 @@ class Generator(ta.Generator[O, I, R]):
             self.value = e.value
             raise

-    def close(self):
-        self._g.close()

+capture_generator = GeneratorCapture

-##
-
-
-class CoroutineGenerator(ta.Generic[O, I, R]):
-    def __init__(self, g: ta.Generator[O, I, R]) -> None:
-        super().__init__()
-        self._g = g
-
-    @property
-    def g(self) -> ta.Generator[O, I, R]:
-        return self._g
-
-    #
-
-    def close(self) -> None:
-        self._g.close()
-
-    def __enter__(self) -> ta.Self:
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        self._g.close()
-
-    #

+class CoroutineGeneratorCapture(AbstractGeneratorCapture[O, I, R]):
     class Output(ta.NamedTuple, ta.Generic[T]):
         v: T

@@ -192,7 +183,7 @@ class CoroutineGenerator(ta.Generic[O, I, R]):
         return self.Yield(o)


-corogen = CoroutineGenerator
+capture_coroutine = CoroutineGeneratorCapture


 ##
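
Note: the renames make the wrappers read as verbs at the call site. A small sketch of capturing a generator's return value, assuming the wrapper stores it on .value when the underlying generator finishes (as the StopIteration handling above suggests):

    import typing as ta

    from omlish import lang


    def gen() -> ta.Generator[int, None, str]:
        yield 1
        yield 2
        return 'done'


    g = lang.capture_generator(gen())
    print(list(g))  # [1, 2]
    print(g.value)  # 'done' -- the return value captured by the wrapper
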
@@ -16,6 +16,7 @@ from .types import (  # noqa

     NotSpecified,
     is_not_specified,
+    check_not_not_specified,

     Request,
     request,
@@ -0,0 +1,222 @@
+"""
+TODO:
+ - kill receive loop on __aexit__
+"""
+import builtins
+import json
+import typing as ta
+import uuid
+
+import anyio.abc
+
+from ... import lang
+from ... import marshal as msh
+from ...asyncs import anyio as aiu
+from ...io.buffers import DelimitingBuffer
+from .types import Error
+from .types import Id
+from .types import Message
+from .types import NotSpecified
+from .types import Object
+from .types import Request
+from .types import Response
+from .types import detect_message_type
+from .types import notification
+from .types import request
+
+
+##
+
+
+@lang.cached_function
+def _create_id() -> str:
+    return str(uuid.uuid4())
+
+
+class JsonrpcConnection:
+    def __init__(
+            self,
+            tg: anyio.abc.TaskGroup,
+            stream: anyio.abc.ByteStream,
+            *,
+            request_handler: ta.Callable[['JsonrpcConnection', Request], ta.Awaitable[None]] | None = None,
+            notification_handler: ta.Callable[['JsonrpcConnection', Request], ta.Awaitable[None]] | None = None,
+            default_timeout: float | None = 30.,
+    ) -> None:
+        super().__init__()
+
+        self._tg = tg
+        self._stream = stream
+        self._request_handler = request_handler
+        self._notification_handler = notification_handler
+        self._default_timeout = default_timeout
+
+        self._buf = DelimitingBuffer(b'\n')
+        self._response_futures_by_id: dict[Id, aiu.Future[Response]] = {}
+        self._send_lock = anyio.Lock()
+        self._received_eof = False
+        self._running = True
+
+    #
+
+    class Error(Exception):
+        """Base class for JSON-RPC related errors."""
+
+    class TimeoutError(Error, builtins.TimeoutError):  # noqa
+        """Raised when a request times out."""
+
+    class ConnectionError(Error, builtins.ConnectionError):  # noqa
+        """Raised when there are connection-related issues."""
+
+    class ProtocolError(Error):
+        """Raised when there are protocol-related issues."""
+
+    #
+
+    async def __aenter__(self) -> 'JsonrpcConnection':
+        await self._tg.start(self._receive_loop)
+        return self
+
+    async def __aexit__(self, exc_type: type[BaseException] | None, *_: object) -> None:
+        self._running = False
+
+    ##
+
+    async def _handle_message(self, msg: Message) -> None:
+        if isinstance(msg, Response):
+            msg_id = msg.id
+            try:
+                resp_fut = self._response_futures_by_id[msg_id]
+            except KeyError:
+                raise NotImplementedError from None
+            resp_fut.set_value(msg)
+
+        elif isinstance(msg, Request):
+            if msg.is_notification:
+                if (mh := self._notification_handler) is not None:
+                    await mh(self, msg)
+
+            else:  # noqa
+                if (rh := self._request_handler) is not None:
+                    await rh(self, msg)
+
+        else:
+            raise TypeError(msg)
+
+    #
+
+    CLOSED_EXCEPTIONS: ta.ClassVar[tuple[type[Exception], ...]] = (
+        anyio.ClosedResourceError,
+        anyio.EndOfStream,
+    )
+
+    ERROR_EXCEPTIONS: ta.ClassVar[tuple[type[Exception], ...]] = (
+        OSError,
+        anyio.BrokenResourceError,
+    )
+
+    async def _receive_message_batch(self) -> list[Message] | None:
+        if self._received_eof:
+            return None
+
+        while True:
+            try:
+                data = await self._stream.receive()
+            except self.CLOSED_EXCEPTIONS:
+                data = b''
+            except self.ERROR_EXCEPTIONS as e:
+                raise JsonrpcConnection.ConnectionError('Failed to receive message') from e
+
+            if not data:
+                self._received_eof = True
+
+            lines = list(self._buf.feed(data))
+            if lines:
+                break
+
+            if not data:
+                return None
+
+        msgs: list[Message] = []
+        for line in lines:
+            if isinstance(line, DelimitingBuffer.Incomplete):
+                raise ConnectionError('Received incomplete message')
+
+            try:
+                dct = json.loads(line.decode('utf-8'))
+            except (UnicodeDecodeError, json.JSONDecodeError) as e:
+                raise JsonrpcConnection.ProtocolError from e
+
+            mcls = detect_message_type(dct)
+            try:
+                msg = msh.unmarshal(dct, mcls)
+            except Exception as e:
+                raise JsonrpcConnection.ProtocolError from e
+
+            msgs.append(msg)
+
+        return msgs
+
+    async def _receive_loop(
+            self,
+            *,
+            task_status: anyio.abc.TaskStatus[ta.Any] = anyio.TASK_STATUS_IGNORED,
+    ) -> None:
+        task_status.started()
+
+        while self._running:
+            msgs = await self._receive_message_batch()
+            if msgs is None:
+                break
+
+            for msg in msgs:
+                await self._handle_message(msg)
+
+    ##
+
+    async def send_message(self, msg: Message) -> None:
+        async with self._send_lock:
+            try:
+                await self._stream.send(json.dumps(msh.marshal(msg)).encode() + b'\n')
+            except self.ERROR_EXCEPTIONS as e:
+                raise ConnectionError('Failed to send message') from e
+
+    #
+
+    async def request(
+            self,
+            method: str,
+            params: Object | None = None,
+            *,
+            timeout: float | None = None,
+    ) -> ta.Any:
+        msg_id = _create_id()
+        req = request(msg_id, method, params)
+
+        fut = aiu.create_future[Response]()
+        self._response_futures_by_id[msg_id] = fut
+
+        try:
+            await self.send_message(req)
+
+            timeout_val = timeout if timeout is not None else self._default_timeout
+            try:
+                with anyio.fail_after(timeout_val):
+                    await fut
+            except TimeoutError as e:
+                raise JsonrpcConnection.TimeoutError(f'Request timed out after {timeout_val} seconds') from e
+
+            response = fut.outcome.must().unwrap()
+
+            if response.error is not NotSpecified:
+                error = ta.cast(Error, response.error)
+                raise JsonrpcConnection.Error(f'Error {error.code}: {error.message}')
+
+            return response.result
+
+        finally:
+            self._response_futures_by_id.pop(msg_id, None)  # noqa
+
+    async def notify(self, method: str, params: Object | None = None) -> None:
+        msg = notification(method, params)
+        await self.send_message(msg)
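
Note: a rough usage sketch for the new connection class, based only on the surface shown above. The module path, TCP endpoint, and method names are assumptions for illustration; anyio.connect_tcp merely stands in for whatever ByteStream the caller actually has:

    import anyio

    from omlish.specs.jsonrpc.conns import JsonrpcConnection  # path assumed


    async def main() -> None:
        async with anyio.create_task_group() as tg:
            stream = await anyio.connect_tcp('127.0.0.1', 8080)  # hypothetical server
            async with JsonrpcConnection(tg, stream) as conn:
                result = await conn.request('ping', {'x': 1})
                print(result)
                await conn.notify('log', {'msg': 'hello'})


    anyio.run(main)
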
@@ -18,6 +18,8 @@ from ... import lang
 from ... import marshal as msh


+T = ta.TypeVar('T')
+

 NUMBER_TYPES: tuple[type, ...] = (int, float)
 Number: ta.TypeAlias = int | float
@@ -44,6 +46,11 @@ def is_not_specified(v: ta.Any) -> bool:
     return v is NotSpecified


+def check_not_not_specified(v: T | type[NotSpecified]) -> T:
+    check.arg(not is_not_specified(v))
+    return ta.cast(T, v)
+
+
 ##

@@ -87,10 +94,22 @@ class Response(lang.Final):

     _: dc.KW_ONLY

+    #
+
     result: ta.Any = dc.field(default=NotSpecified)
     error: ta.Union['Error', type[NotSpecified]] = dc.field(default=NotSpecified)
     dc.validate(lambda self: is_not_specified(self.result) ^ is_not_specified(self.error))

+    @property
+    def is_result(self) -> bool:
+        return not is_not_specified(self.result)
+
+    @property
+    def is_error(self) -> bool:
+        return not is_not_specified(self.error)
+
+    #
+
     jsonrpc: str = dc.field(default=VERSION)
     dc.validate(lambda self: self.jsonrpc == VERSION)

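
Note: a small sketch of consuming a Response with the new helpers, given that the validators above guarantee exactly one of result/error is set (import path assumed):

    from omlish.specs.jsonrpc.types import Response, check_not_not_specified  # path assumed


    def handle(resp: Response):
        if resp.is_error:
            err = check_not_not_specified(resp.error)  # narrows away the NotSpecified sentinel
            raise RuntimeError(f'jsonrpc error {err.code}: {err.message}')
        return resp.result
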
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: omlish
-Version: 0.0.0.dev306
+Version: 0.0.0.dev308
 Summary: omlish
 Author: wrmsr
 License: BSD-3-Clause