betterproto2_compiler-0.0.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. betterproto2_compiler/__init__.py +0 -0
  2. betterproto2_compiler/_types.py +13 -0
  3. betterproto2_compiler/casing.py +140 -0
  4. betterproto2_compiler/compile/__init__.py +0 -0
  5. betterproto2_compiler/compile/importing.py +193 -0
  6. betterproto2_compiler/compile/naming.py +21 -0
  7. betterproto2_compiler/enum.py +180 -0
  8. betterproto2_compiler/grpc/__init__.py +0 -0
  9. betterproto2_compiler/grpc/grpclib_client.py +172 -0
  10. betterproto2_compiler/grpc/grpclib_server.py +32 -0
  11. betterproto2_compiler/grpc/util/__init__.py +0 -0
  12. betterproto2_compiler/grpc/util/async_channel.py +190 -0
  13. betterproto2_compiler/lib/__init__.py +0 -0
  14. betterproto2_compiler/lib/google/__init__.py +0 -0
  15. betterproto2_compiler/lib/google/protobuf/__init__.py +1 -0
  16. betterproto2_compiler/lib/google/protobuf/compiler/__init__.py +1 -0
  17. betterproto2_compiler/lib/pydantic/__init__.py +0 -0
  18. betterproto2_compiler/lib/pydantic/google/__init__.py +0 -0
  19. betterproto2_compiler/lib/pydantic/google/protobuf/__init__.py +2690 -0
  20. betterproto2_compiler/lib/pydantic/google/protobuf/compiler/__init__.py +209 -0
  21. betterproto2_compiler/lib/std/__init__.py +0 -0
  22. betterproto2_compiler/lib/std/google/__init__.py +0 -0
  23. betterproto2_compiler/lib/std/google/protobuf/__init__.py +2517 -0
  24. betterproto2_compiler/lib/std/google/protobuf/compiler/__init__.py +197 -0
  25. betterproto2_compiler/plugin/__init__.py +3 -0
  26. betterproto2_compiler/plugin/__main__.py +3 -0
  27. betterproto2_compiler/plugin/compiler.py +59 -0
  28. betterproto2_compiler/plugin/main.py +52 -0
  29. betterproto2_compiler/plugin/models.py +709 -0
  30. betterproto2_compiler/plugin/module_validation.py +161 -0
  31. betterproto2_compiler/plugin/parser.py +263 -0
  32. betterproto2_compiler/plugin/plugin.bat +2 -0
  33. betterproto2_compiler/plugin/typing_compiler.py +167 -0
  34. betterproto2_compiler/py.typed +0 -0
  35. betterproto2_compiler/templates/header.py.j2 +50 -0
  36. betterproto2_compiler/templates/template.py.j2 +243 -0
  37. betterproto2_compiler-0.0.1.dist-info/LICENSE.md +22 -0
  38. betterproto2_compiler-0.0.1.dist-info/METADATA +35 -0
  39. betterproto2_compiler-0.0.1.dist-info/RECORD +41 -0
  40. betterproto2_compiler-0.0.1.dist-info/WHEEL +4 -0
  41. betterproto2_compiler-0.0.1.dist-info/entry_points.txt +3 -0
betterproto2_compiler/grpc/grpclib_client.py
@@ -0,0 +1,172 @@
+ import asyncio
+ from abc import ABC
+ from typing import (
+     TYPE_CHECKING,
+     AsyncIterable,
+     AsyncIterator,
+     Collection,
+     Iterable,
+     Mapping,
+     Optional,
+     Tuple,
+     Type,
+     Union,
+ )
+
+ import grpclib.const
+
+ if TYPE_CHECKING:
+     from grpclib.client import Channel
+     from grpclib.metadata import Deadline
+
+     from .._types import (
+         IProtoMessage,
+         T,
+     )
+
+
+ Value = Union[str, bytes]
+ MetadataLike = Union[Mapping[str, Value], Collection[Tuple[str, Value]]]
+ MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]]
+
+
+ class ServiceStub(ABC):
+     """
+     Base class for async gRPC clients.
+     """
+
+     def __init__(
+         self,
+         channel: "Channel",
+         *,
+         timeout: Optional[float] = None,
+         deadline: Optional["Deadline"] = None,
+         metadata: Optional[MetadataLike] = None,
+     ) -> None:
+         self.channel = channel
+         self.timeout = timeout
+         self.deadline = deadline
+         self.metadata = metadata
+
+     def __resolve_request_kwargs(
+         self,
+         timeout: Optional[float],
+         deadline: Optional["Deadline"],
+         metadata: Optional[MetadataLike],
+     ):
+         return {
+             "timeout": self.timeout if timeout is None else timeout,
+             "deadline": self.deadline if deadline is None else deadline,
+             "metadata": self.metadata if metadata is None else metadata,
+         }
+
+     async def _unary_unary(
+         self,
+         route: str,
+         request: "IProtoMessage",
+         response_type: Type["T"],
+         *,
+         timeout: Optional[float] = None,
+         deadline: Optional["Deadline"] = None,
+         metadata: Optional[MetadataLike] = None,
+     ) -> "T":
+         """Make a unary request and return the response."""
+         async with self.channel.request(
+             route,
+             grpclib.const.Cardinality.UNARY_UNARY,
+             type(request),
+             response_type,
+             **self.__resolve_request_kwargs(timeout, deadline, metadata),
+         ) as stream:
+             await stream.send_message(request, end=True)
+             response = await stream.recv_message()
+             assert response is not None
+             return response
+
+     async def _unary_stream(
+         self,
+         route: str,
+         request: "IProtoMessage",
+         response_type: Type["T"],
+         *,
+         timeout: Optional[float] = None,
+         deadline: Optional["Deadline"] = None,
+         metadata: Optional[MetadataLike] = None,
+     ) -> AsyncIterator["T"]:
+         """Make a unary request and return the stream response iterator."""
+         async with self.channel.request(
+             route,
+             grpclib.const.Cardinality.UNARY_STREAM,
+             type(request),
+             response_type,
+             **self.__resolve_request_kwargs(timeout, deadline, metadata),
+         ) as stream:
+             await stream.send_message(request, end=True)
+             async for message in stream:
+                 yield message
+
+     async def _stream_unary(
+         self,
+         route: str,
+         request_iterator: MessageSource,
+         request_type: Type["IProtoMessage"],
+         response_type: Type["T"],
+         *,
+         timeout: Optional[float] = None,
+         deadline: Optional["Deadline"] = None,
+         metadata: Optional[MetadataLike] = None,
+     ) -> "T":
+         """Make a stream request and return the response."""
+         async with self.channel.request(
+             route,
+             grpclib.const.Cardinality.STREAM_UNARY,
+             request_type,
+             response_type,
+             **self.__resolve_request_kwargs(timeout, deadline, metadata),
+         ) as stream:
+             await stream.send_request()
+             await self._send_messages(stream, request_iterator)
+             response = await stream.recv_message()
+             assert response is not None
+             return response
+
+     async def _stream_stream(
+         self,
+         route: str,
+         request_iterator: MessageSource,
+         request_type: Type["IProtoMessage"],
+         response_type: Type["T"],
+         *,
+         timeout: Optional[float] = None,
+         deadline: Optional["Deadline"] = None,
+         metadata: Optional[MetadataLike] = None,
+     ) -> AsyncIterator["T"]:
+         """
+         Make a stream request and return an AsyncIterator to iterate over response
+         messages.
+         """
+         async with self.channel.request(
+             route,
+             grpclib.const.Cardinality.STREAM_STREAM,
+             request_type,
+             response_type,
+             **self.__resolve_request_kwargs(timeout, deadline, metadata),
+         ) as stream:
+             await stream.send_request()
+             sending_task = asyncio.ensure_future(self._send_messages(stream, request_iterator))
+             try:
+                 async for response in stream:
+                     yield response
+             except:
+                 sending_task.cancel()
+                 raise
+
+     @staticmethod
+     async def _send_messages(stream, messages: MessageSource):
+         if isinstance(messages, AsyncIterable):
+             async for message in messages:
+                 await stream.send_message(message)
+         else:
+             for message in messages:
+                 await stream.send_message(message)
+         await stream.end()
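For orientation, a client generated in this style subclasses ServiceStub and forwards each RPC to one of the helpers above. The hand-written sketch below only illustrates the calling pattern; GreeterStub, HelloRequest, and HelloReply are hypothetical names standing in for code and messages an actual compiler run would produce, not anything shipped in this wheel.

# Sketch only: HelloRequest/HelloReply stand in for generated betterproto messages.
from grpclib.client import Channel

from betterproto2_compiler.grpc.grpclib_client import ServiceStub


class GreeterStub(ServiceStub):
    async def say_hello(self, request: "HelloRequest") -> "HelloReply":
        # Route format "/<package>.<Service>/<Method>" is what grpclib expects.
        return await self._unary_unary(
            "/greeter.Greeter/SayHello",
            request,
            HelloReply,
        )


async def main() -> None:
    channel = Channel("127.0.0.1", 50051)
    try:
        stub = GreeterStub(channel, timeout=5.0)
        reply = await stub.say_hello(HelloRequest(name="world"))
        print(reply)
    finally:
        channel.close()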
betterproto2_compiler/grpc/grpclib_server.py
@@ -0,0 +1,32 @@
+ from abc import ABC
+ from collections.abc import AsyncIterable
+ from typing import (
+     Any,
+     Callable,
+ )
+
+ import grpclib
+ import grpclib.server
+
+
+ class ServiceBase(ABC):
+     """
+     Base class for async gRPC servers.
+     """
+
+     async def _call_rpc_handler_server_stream(
+         self,
+         handler: Callable,
+         stream: grpclib.server.Stream,
+         request: Any,
+     ) -> None:
+         response_iter = handler(request)
+         # check if response is actually an AsyncIterator
+         # this might be false if the method just returns without
+         # yielding at least once
+         # in that case, we just interpret it as an empty iterator
+         if isinstance(response_iter, AsyncIterable):
+             async for response_message in response_iter:
+                 await stream.send_message(response_message)
+         else:
+             response_iter.close()
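To show where _call_rpc_handler_server_stream fits, here is a hedged sketch of a hand-written server-streaming handler in the shape a generated service might take; GreeterService, HelloRequest, and HelloReply are hypothetical names, not output of this compiler. If say_hello_stream returned without yielding, the helper above would fall into its else branch and close the returned coroutine instead of iterating it.

# Sketch only: the service and message names below are hypothetical.
from typing import AsyncIterator

import grpclib.server

from betterproto2_compiler.grpc.grpclib_server import ServiceBase


class GreeterService(ServiceBase):
    async def say_hello_stream(self, request: "HelloRequest") -> AsyncIterator["HelloReply"]:
        # An async generator: every yield becomes one response message on the stream.
        yield HelloReply(message=f"Hello, {request.name}")
        yield HelloReply(message=f"Goodbye, {request.name}")

    async def __rpc_say_hello_stream(self, stream: grpclib.server.Stream) -> None:
        request = await stream.recv_message()
        await self._call_rpc_handler_server_stream(
            self.say_hello_stream,
            stream,
            request,
        )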
betterproto2_compiler/grpc/util/__init__.py: File without changes
betterproto2_compiler/grpc/util/async_channel.py
@@ -0,0 +1,190 @@
+ import asyncio
+ from typing import (
+     AsyncIterable,
+     AsyncIterator,
+     Iterable,
+     Optional,
+     TypeVar,
+     Union,
+ )
+
+ T = TypeVar("T")
+
+
+ class ChannelClosed(Exception):
+     """
+     An exception raised on an attempt to send through a closed channel
+     """
+
+
+ class ChannelDone(Exception):
+     """
+     An exception raised on an attempt to receive from a channel that is both closed
+     and empty.
+     """
+
+
+ class AsyncChannel(AsyncIterable[T]):
+     """
+     A buffered async channel for sending items between coroutines with FIFO ordering.
+
+     This makes decoupled bidirectional streaming gRPC requests easy if used like:
+
+     .. code-block:: python
+         client = GeneratedStub(grpclib_chan)
+         request_channel = AsyncChannel()
+         # We can start by sending all the requests we already have
+         await request_channel.send_from([RequestObject(...), RequestObject(...)])
+         async for response in client.rpc_call(request_channel):
+             # The response iterator will remain active until the connection is closed
+             ...
+             # More items can be sent at any time
+             await request_channel.send(RequestObject(...))
+             ...
+             # The channel must be closed to complete the gRPC connection
+             request_channel.close()
+
+     Items can be sent through the channel by either:
+     - providing an iterable to the send_from method
+     - passing them to the send method one at a time
+
+     Items can be received from the channel by either:
+     - iterating over the channel with a for loop to get all items
+     - calling the receive method to get one item at a time
+
+     If the channel is empty then receivers will wait until either an item appears or the
+     channel is closed.
+
+     Once the channel is closed then subsequent attempts to send through the channel will
+     fail with a ChannelClosed exception.
+
+     When the channel is closed and empty then it is done, and further attempts to receive
+     from it will fail with a ChannelDone exception.
+
+     If multiple coroutines receive from the channel concurrently, each item sent will be
+     received by only one of the receivers.
+
+     :param source:
+         An optional iterable with items that should be sent through the channel
+         immediately.
+     :param buffer_limit:
+         Limit the number of items that can be buffered in the channel. A value less than
+         1 implies no limit. If the channel is full then attempts to send more items will
+         result in the sender waiting until an item is received from the channel.
+     :param close:
+         If set to True then the channel will automatically close after exhausting source
+         or immediately if no source is provided.
+     """
+
+     def __init__(self, *, buffer_limit: int = 0, close: bool = False):
+         self._queue: asyncio.Queue[T] = asyncio.Queue(buffer_limit)
+         self._closed = False
+         self._waiting_receivers: int = 0
+         # Track whether flush has been invoked so it can only happen once
+         self._flushed = False
+
+     def __aiter__(self) -> AsyncIterator[T]:
+         return self
+
+     async def __anext__(self) -> T:
+         if self.done():
+             raise StopAsyncIteration
+         self._waiting_receivers += 1
+         try:
+             result = await self._queue.get()
+             if result is self.__flush:
+                 raise StopAsyncIteration
+             return result
+         finally:
+             self._waiting_receivers -= 1
+             self._queue.task_done()
+
+     def closed(self) -> bool:
+         """
+         Returns True if this channel is closed and no longer accepting new items
+         """
+         return self._closed
+
+     def done(self) -> bool:
+         """
+         Check if this channel is done.
+
+         :return: True if this channel is closed and has been drained of items, in
+             which case any further attempts to receive an item from this channel will raise
+             a ChannelDone exception.
+         """
+         # After close the channel is not yet done until there is at least one waiting
+         # receiver per enqueued item.
+         return self._closed and self._queue.qsize() <= self._waiting_receivers
+
+     async def send_from(self, source: Union[Iterable[T], AsyncIterable[T]], close: bool = False) -> "AsyncChannel[T]":
+         """
+         Iterates the given [Async]Iterable and sends all the resulting items.
+         If close is set to True then subsequent send calls will be rejected with a
+         ChannelClosed exception.
+         :param source: an iterable of items to send
+         :param close:
+             if True then the channel will be closed after the source has been exhausted
+
+         """
+         if self._closed:
+             raise ChannelClosed("Cannot send through a closed channel")
+         if isinstance(source, AsyncIterable):
+             async for item in source:
+                 await self._queue.put(item)
+         else:
+             for item in source:
+                 await self._queue.put(item)
+         if close:
+             # Complete the closing process
+             self.close()
+         return self
+
+     async def send(self, item: T) -> "AsyncChannel[T]":
+         """
+         Send a single item over this channel.
+         :param item: The item to send
+         """
+         if self._closed:
+             raise ChannelClosed("Cannot send through a closed channel")
+         await self._queue.put(item)
+         return self
+
+     async def receive(self) -> Optional[T]:
+         """
+         Returns the next item from this channel when it becomes available,
+         or None if the channel is closed before another item is sent.
+         :return: An item from the channel
+         """
+         if self.done():
+             raise ChannelDone("Cannot receive from a closed channel")
+         self._waiting_receivers += 1
+         try:
+             result = await self._queue.get()
+             if result is self.__flush:
+                 return None
+             return result
+         finally:
+             self._waiting_receivers -= 1
+             self._queue.task_done()
+
+     def close(self):
+         """
+         Close this channel to new items
+         """
+         self._closed = True
+         asyncio.ensure_future(self._flush_queue())
+
+     async def _flush_queue(self):
+         """
+         To be called after the channel is closed. Pushes a number of self.__flush
+         objects to the queue to ensure no waiting consumers get deadlocked.
+         """
+         if not self._flushed:
+             self._flushed = True
+             deadlocked_receivers = max(0, self._waiting_receivers - self._queue.qsize())
+             for _ in range(deadlocked_receivers):
+                 await self._queue.put(self.__flush)
+
+     # A special signal object for flushing the queue when the channel is closed
+     __flush = object()
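Outside of gRPC, AsyncChannel can be exercised on its own. The snippet below is a minimal, self-contained sketch (not taken from the package): one producer task feeds the channel while the main coroutine drains it by async iteration, and closing the channel ends the iteration.

import asyncio

from betterproto2_compiler.grpc.util.async_channel import AsyncChannel


async def main() -> None:
    channel: AsyncChannel[int] = AsyncChannel()

    async def produce() -> None:
        await channel.send_from(range(3))  # queue several items at once
        await channel.send(99)             # or send them one at a time
        channel.close()                    # closing lets the consumer's iteration finish

    producer = asyncio.ensure_future(produce())
    async for item in channel:
        print(item)  # prints 0, 1, 2, 99
    await producer
    await asyncio.sleep(0)  # give the channel's internal flush task a chance to run


asyncio.run(main())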
betterproto2_compiler/lib/__init__.py: File without changes
betterproto2_compiler/lib/google/__init__.py: File without changes
betterproto2_compiler/lib/google/protobuf/__init__.py
@@ -0,0 +1 @@
+ from betterproto2_compiler.lib.std.google.protobuf import *
betterproto2_compiler/lib/google/protobuf/compiler/__init__.py
@@ -0,0 +1 @@
+ from betterproto2_compiler.lib.std.google.protobuf.compiler import *
betterproto2_compiler/lib/pydantic/__init__.py: File without changes
betterproto2_compiler/lib/pydantic/google/__init__.py: File without changes