hat-event 0.9.27__cp310.cp311.cp312.cp313-abi3-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hat/event/__init__.py +1 -0
- hat/event/adminer/__init__.py +18 -0
- hat/event/adminer/client.py +124 -0
- hat/event/adminer/common.py +27 -0
- hat/event/adminer/server.py +111 -0
- hat/event/backends/__init__.py +0 -0
- hat/event/backends/dummy.py +49 -0
- hat/event/backends/lmdb/__init__.py +9 -0
- hat/event/backends/lmdb/backend.py +319 -0
- hat/event/backends/lmdb/common.py +277 -0
- hat/event/backends/lmdb/conditions.py +102 -0
- hat/event/backends/lmdb/convert/__init__.py +0 -0
- hat/event/backends/lmdb/convert/__main__.py +8 -0
- hat/event/backends/lmdb/convert/convert_v06_to_v07.py +213 -0
- hat/event/backends/lmdb/convert/convert_v07_to_v09.py +175 -0
- hat/event/backends/lmdb/convert/main.py +88 -0
- hat/event/backends/lmdb/convert/v06.py +216 -0
- hat/event/backends/lmdb/convert/v07.py +508 -0
- hat/event/backends/lmdb/convert/v09.py +50 -0
- hat/event/backends/lmdb/convert/version.py +63 -0
- hat/event/backends/lmdb/environment.py +100 -0
- hat/event/backends/lmdb/latestdb.py +116 -0
- hat/event/backends/lmdb/manager/__init__.py +0 -0
- hat/event/backends/lmdb/manager/__main__.py +8 -0
- hat/event/backends/lmdb/manager/common.py +45 -0
- hat/event/backends/lmdb/manager/copy.py +92 -0
- hat/event/backends/lmdb/manager/main.py +34 -0
- hat/event/backends/lmdb/manager/query.py +215 -0
- hat/event/backends/lmdb/refdb.py +234 -0
- hat/event/backends/lmdb/systemdb.py +102 -0
- hat/event/backends/lmdb/timeseriesdb.py +486 -0
- hat/event/backends/memory.py +178 -0
- hat/event/common/__init__.py +144 -0
- hat/event/common/backend.py +91 -0
- hat/event/common/collection/__init__.py +8 -0
- hat/event/common/collection/common.py +28 -0
- hat/event/common/collection/list.py +19 -0
- hat/event/common/collection/tree.py +62 -0
- hat/event/common/common.py +176 -0
- hat/event/common/encoder.py +305 -0
- hat/event/common/json_schema_repo.json +1 -0
- hat/event/common/matches.py +44 -0
- hat/event/common/module.py +142 -0
- hat/event/common/sbs_repo.json +1 -0
- hat/event/common/subscription/__init__.py +22 -0
- hat/event/common/subscription/_csubscription.abi3.pyd +0 -0
- hat/event/common/subscription/common.py +145 -0
- hat/event/common/subscription/csubscription.py +47 -0
- hat/event/common/subscription/pysubscription.py +97 -0
- hat/event/component.py +284 -0
- hat/event/eventer/__init__.py +28 -0
- hat/event/eventer/client.py +260 -0
- hat/event/eventer/common.py +27 -0
- hat/event/eventer/server.py +286 -0
- hat/event/manager/__init__.py +0 -0
- hat/event/manager/__main__.py +8 -0
- hat/event/manager/common.py +48 -0
- hat/event/manager/main.py +387 -0
- hat/event/server/__init__.py +0 -0
- hat/event/server/__main__.py +8 -0
- hat/event/server/adminer_server.py +43 -0
- hat/event/server/engine.py +216 -0
- hat/event/server/engine_runner.py +127 -0
- hat/event/server/eventer_client.py +205 -0
- hat/event/server/eventer_client_runner.py +152 -0
- hat/event/server/eventer_server.py +119 -0
- hat/event/server/main.py +84 -0
- hat/event/server/main_runner.py +212 -0
- hat_event-0.9.27.dist-info/LICENSE +202 -0
- hat_event-0.9.27.dist-info/METADATA +108 -0
- hat_event-0.9.27.dist-info/RECORD +73 -0
- hat_event-0.9.27.dist-info/WHEEL +7 -0
- hat_event-0.9.27.dist-info/entry_points.txt +5 -0
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
"""Eventer Client"""
|
|
2
|
+
|
|
3
|
+
from collections.abc import Collection, Iterable
|
|
4
|
+
import asyncio
|
|
5
|
+
import logging
|
|
6
|
+
import typing
|
|
7
|
+
|
|
8
|
+
from hat import aio
|
|
9
|
+
from hat.drivers import chatter
|
|
10
|
+
from hat.drivers import tcp
|
|
11
|
+
|
|
12
|
+
from hat.event.eventer import common
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
mlog: logging.Logger = logging.getLogger(__name__)
|
|
16
|
+
"""Module logger"""
|
|
17
|
+
|
|
18
|
+
StatusCb: typing.TypeAlias = aio.AsyncCallable[['Client', common.Status],
|
|
19
|
+
None]
|
|
20
|
+
"""Status callback"""
|
|
21
|
+
|
|
22
|
+
EventsCb: typing.TypeAlias = aio.AsyncCallable[['Client',
|
|
23
|
+
Collection[common.Event]],
|
|
24
|
+
None]
|
|
25
|
+
"""Events callback"""
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class EventerInitError(Exception):
|
|
29
|
+
"""Eventer initialization error"""
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
async def connect(addr: tcp.Address,
                  client_name: str,
                  *,
                  client_token: str | None = None,
                  subscriptions: Iterable[common.EventType] = (),
                  server_id: common.ServerId | None = None,
                  persisted: bool = False,
                  status_cb: StatusCb | None = None,
                  events_cb: EventsCb | None = None,
                  **kwargs
                  ) -> 'Client':
    """Connect to Eventer Server

    Arguments `client_name` and optional `client_token` identifies eventer
    client.

    According to Event Server specification, each subscription is event
    type identifier which can contain special subtypes ``?`` and ``*``.
    Subtype ``?`` can occur at any position inside event type identifier
    and is used as replacement for any single subtype. Subtype ``*`` is valid
    only as last subtype in event type identifier and is used as replacement
    for zero or more arbitrary subtypes.

    If `subscriptions` is empty, client doesn't subscribe for any events
    and will not receive server's notifications.

    If `server_id` is ``None``, client will receive all event notifications,
    in accordance to `subscriptions`, regardless of event's server id. If
    `server_id` is set, Eventer Server will only send events notifications
    for events with provided server id.

    If `persisted` is set to ``True``, Eventer Server will notify events
    after they are persisted (flushed to disk). Otherwise, events are
    notified immediately after registration.

    Additional arguments are passed to `hat.drivers.chatter.connect`
    coroutine.

    Raises:
        EventerInitError: server rejected initialization request

    """
    client = Client()
    client._status_cb = status_cb
    client._events_cb = events_cb
    client._loop = asyncio.get_running_loop()
    client._conv_futures = {}
    client._status = common.Status.STANDBY

    client._conn = await chatter.connect(addr, **kwargs)

    try:
        # initialization handshake: send init request and wait for the
        # matching response on the same chatter conversation
        req_data = {'clientName': client_name,
                    'clientToken': _optional_to_sbs(client_token),
                    'subscriptions': [list(i) for i in subscriptions],
                    'serverId': _optional_to_sbs(server_id),
                    'persisted': persisted}
        conv = await common.send_msg(conn=client._conn,
                                     msg_type='HatEventer.MsgInitReq',
                                     msg_data=req_data,
                                     last=False)

        res, res_type, res_data = await common.receive_msg(client._conn)
        if res_type != 'HatEventer.MsgInitRes' or res.conv != conv:
            raise Exception('invalid init response')

        if res_data[0] == 'success':
            client._status = common.Status(common.status_from_sbs(res_data[1]))

        elif res_data[0] == 'error':
            raise EventerInitError(res_data[1])

        else:
            raise ValueError('unsupported init response')

        # background task dispatching all subsequent incoming messages
        client.async_group.spawn(client._receive_loop)

    except BaseException:
        # close the connection without masking the original exception;
        # uncancellable so cleanup survives task cancellation
        await aio.uncancellable(client.async_close())
        raise

    return client
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
class Client(aio.Resource):
    """Eventer client

    For creating new client see `connect` coroutine.

    Responses are matched to requests by chatter conversation; pending
    request futures are stored in ``self._conv_futures`` keyed by
    conversation.

    """

    @property
    def async_group(self) -> aio.Group:
        """Async group"""
        # client lifetime is bound to the underlying chatter connection
        return self._conn.async_group

    @property
    def status(self) -> common.Status:
        """Status"""
        # last known server status (updated on MsgStatusNotify)
        return self._status

    async def register(self,
                       events: Collection[common.RegisterEvent],
                       with_response: bool = False
                       ) -> Collection[common.Event] | None:
        """Register events and optionally wait for response

        If `with_response` is ``True``, this coroutine returns list of events
        or ``None`` if registration failure occurred.

        """
        msg_data = [common.register_event_to_sbs(i) for i in events]
        # when a response is expected, keep the conversation open
        # (last=False) so the server can reply with MsgRegisterRes
        conv = await common.send_msg(conn=self._conn,
                                     msg_type='HatEventer.MsgRegisterReq',
                                     msg_data=msg_data,
                                     last=not with_response)

        if with_response:
            return await self._wait_conv_res(conv)

    async def query(self,
                    params: common.QueryParams
                    ) -> common.QueryResult:
        """Query events from server

        Raises:
            ConnectionError: connection closed before response arrived

        """
        msg_data = common.query_params_to_sbs(params)
        conv = await common.send_msg(conn=self._conn,
                                     msg_type='HatEventer.MsgQueryReq',
                                     msg_data=msg_data,
                                     last=False)

        return await self._wait_conv_res(conv)

    async def _receive_loop(self):
        # single reader task: dispatches every incoming message by type
        mlog.debug("starting receive loop")
        try:
            while True:
                mlog.debug("waiting for incoming message")
                msg, msg_type, msg_data = await common.receive_msg(self._conn)

                if msg_type == 'HatEventer.MsgStatusNotify':
                    mlog.debug("received status notification")
                    await self._process_msg_status_notify(msg, msg_data)

                elif msg_type == 'HatEventer.MsgEventsNotify':
                    mlog.debug("received events notification")
                    await self._process_msg_events_notify(msg, msg_data)

                elif msg_type == 'HatEventer.MsgRegisterRes':
                    mlog.debug("received register response")
                    await self._process_msg_register_res(msg, msg_data)

                elif msg_type == 'HatEventer.MsgQueryRes':
                    mlog.debug("received query response")
                    await self._process_msg_query_res(msg, msg_data)

                else:
                    raise Exception("unsupported message type")

        except ConnectionError:
            # normal termination path - connection closed
            pass

        except Exception as e:
            mlog.error("read loop error: %s", e, exc_info=e)

        finally:
            mlog.debug("stopping receive loop")
            self.close()

            # fail all in-flight register/query requests so their
            # awaiters are not left pending forever
            for future in self._conv_futures.values():
                if not future.done():
                    future.set_exception(ConnectionError())

    async def _wait_conv_res(self, conv):
        # register a future keyed by conversation and await its result,
        # set by the receive loop when the matching response arrives
        if not self.is_open:
            raise ConnectionError()

        future = self._loop.create_future()
        self._conv_futures[conv] = future

        try:
            return await future

        finally:
            # pop with default: receive loop teardown may race with us
            self._conv_futures.pop(conv, None)

    async def _process_msg_status_notify(self, msg, msg_data):
        self._status = common.status_from_sbs(msg_data)

        if self._status_cb:
            await aio.call(self._status_cb, self, self._status)

    async def _process_msg_events_notify(self, msg, msg_data):
        events = [common.event_from_sbs(event) for event in msg_data]

        if self._events_cb:
            await aio.call(self._events_cb, self, events)

        # msg.last means the server did not request an acknowledgement
        if msg.last:
            return

        await common.send_msg(conn=self._conn,
                              msg_type='HatEventer.MsgEventsAck',
                              msg_data=None,
                              conv=msg.conv)

    async def _process_msg_register_res(self, msg, msg_data):
        # msg_data is an SBS union: ('events', [...]) or ('failure', None)
        if msg_data[0] == 'events':
            result = [common.event_from_sbs(event) for event in msg_data[1]]

        elif msg_data[0] == 'failure':
            result = None

        else:
            raise ValueError('unsupported register response')

        future = self._conv_futures.get(msg.conv)
        if not future or future.done():
            # no awaiter (fire-and-forget register) or already failed
            return

        future.set_result(result)

    async def _process_msg_query_res(self, msg, msg_data):
        result = common.query_result_from_sbs(msg_data)

        future = self._conv_futures.get(msg.conv)
        if not future or future.done():
            return

        future.set_result(result)
|
|
257
|
+
|
|
258
|
+
|
|
259
|
+
def _optional_to_sbs(value):
|
|
260
|
+
return ('value', value) if value is not None else ('none', None)
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
from hat.event.common import * # NOQA
|
|
2
|
+
|
|
3
|
+
import typing
|
|
4
|
+
|
|
5
|
+
from hat import sbs
|
|
6
|
+
from hat.drivers import chatter
|
|
7
|
+
|
|
8
|
+
from hat.event.common import sbs_repo
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
MsgType: typing.TypeAlias = str
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
async def send_msg(conn: chatter.Connection,
                   msg_type: MsgType,
                   msg_data: sbs.Data,
                   **kwargs
                   ) -> chatter.Conversation:
    """Encode `msg_data` as `msg_type` and send it over `conn`.

    Additional keyword arguments are forwarded to `conn.send`.

    """
    encoded = sbs_repo.encode(msg_type, msg_data)
    payload = chatter.Data(msg_type, encoded)
    return await conn.send(payload, **kwargs)
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
async def receive_msg(conn: chatter.Connection
                      ) -> tuple[chatter.Msg, MsgType, sbs.Data]:
    """Receive single message and decode its SBS payload.

    Returns the raw chatter message, its type and decoded data.

    """
    msg = await conn.receive()
    msg_type = msg.data.type
    decoded = sbs_repo.decode(msg_type, msg.data.data)
    return msg, msg_type, decoded
|
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
from collections.abc import Collection
|
|
2
|
+
import asyncio
|
|
3
|
+
import collections
|
|
4
|
+
import contextlib
|
|
5
|
+
import itertools
|
|
6
|
+
import logging
|
|
7
|
+
import typing
|
|
8
|
+
|
|
9
|
+
from hat import aio
|
|
10
|
+
from hat.drivers import chatter
|
|
11
|
+
from hat.drivers import tcp
|
|
12
|
+
|
|
13
|
+
from hat.event.eventer import common
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
mlog: logging.Logger = logging.getLogger(__name__)
|
|
17
|
+
"""Module logger"""
|
|
18
|
+
|
|
19
|
+
ConnectionId: typing.TypeAlias = int
|
|
20
|
+
"""Connection identifier"""
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class ConnectionInfo(typing.NamedTuple):
|
|
24
|
+
id: ConnectionId
|
|
25
|
+
client_name: str
|
|
26
|
+
client_token: str | None
|
|
27
|
+
subscription: common.Subscription
|
|
28
|
+
server_id: int | None
|
|
29
|
+
persisted: bool
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
ConnectionCb: typing.TypeAlias = aio.AsyncCallable[[ConnectionInfo], None]
|
|
33
|
+
"""Connected/disconnected callback"""
|
|
34
|
+
|
|
35
|
+
RegisterCb: typing.TypeAlias = aio.AsyncCallable[
|
|
36
|
+
[ConnectionInfo, Collection[common.RegisterEvent]],
|
|
37
|
+
Collection[common.Event] | None]
|
|
38
|
+
"""Register callback"""
|
|
39
|
+
|
|
40
|
+
QueryCb: typing.TypeAlias = aio.AsyncCallable[
|
|
41
|
+
[ConnectionInfo, common.QueryParams],
|
|
42
|
+
common.QueryResult]
|
|
43
|
+
"""Query callback"""
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
async def listen(addr: tcp.Address,
                 *,
                 status: common.Status = common.Status.STANDBY,
                 connected_cb: ConnectionCb | None = None,
                 disconnected_cb: ConnectionCb | None = None,
                 register_cb: RegisterCb | None = None,
                 query_cb: QueryCb | None = None,
                 close_timeout: float = 0.5,
                 **kwargs
                 ) -> 'Server':
    """Create listening Eventer Server instance

    `status` is the initial status reported to connecting clients.

    `connected_cb`/`disconnected_cb` are called when a client finishes
    (or ends) its initialization handshake; `register_cb`/`query_cb`
    service client register/query requests.

    `close_timeout` limits the wait for a rejected client to close its
    connection after an error init response.

    Additional arguments are passed to `hat.drivers.chatter.listen`.

    """
    server = Server()
    server._status = status
    server._connected_cb = connected_cb
    server._disconnected_cb = disconnected_cb
    server._register_cb = register_cb
    server._query_cb = query_cb
    server._close_timeout = close_timeout
    server._loop = asyncio.get_running_loop()
    # monotonically increasing connection identifiers, starting at 1
    server._next_conn_ids = itertools.count(1)
    server._conn_infos = {}
    server._conn_conv_futures = {}

    server._srv = await chatter.listen(server._connection_loop, addr, **kwargs)
    mlog.debug("listening on %s", addr)

    return server
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
class Server(aio.Resource):
    """Eventer server

    For creating new server see `listen` coroutine.

    Active connections are tracked in ``self._conn_infos`` (connection ->
    ConnectionInfo); pending event acknowledgement futures are kept in
    ``self._conn_conv_futures`` keyed by (connection, conversation).

    """

    @property
    def async_group(self) -> aio.Group:
        """Async group"""
        return self._srv.async_group

    def get_conn_infos(self) -> list[ConnectionInfo]:
        """Get connection infos"""
        return list(self._conn_infos.values())

    async def set_status(self, status: common.Status):
        """Set status"""
        # no-op when status is unchanged - avoids redundant notifications
        if self._status == status:
            return

        self._status = status

        # iterate over a copy: notifying may drop connections from the dict
        for conn in list(self._conn_infos.keys()):
            await self._notify_status(conn)

    async def notify_events(self,
                            events: Collection[common.Event],
                            persisted: bool,
                            with_ack: bool = False):
        """Notify events to clients

        Events are delivered only to connections whose `persisted` flag
        matches, whose subscription matches the event type and whose
        optional server id filter matches the event's server id.

        If `with_ack` is ``True``, this coroutine waits until every
        notified client acknowledges the notification.

        """
        conn_conn_events = collections.deque()

        for conn, info in self._conn_infos.items():
            if info.persisted != persisted:
                continue

            conn_events = collections.deque(
                event for event in events
                if (info.subscription.matches(event.type) and
                    (info.server_id is None or
                     info.server_id == event.id.server)))
            if not conn_events:
                continue

            conn_conn_events.append((conn, conn_events))

        if not conn_conn_events:
            return

        if with_ack:
            # notify all clients concurrently and wait for all acks
            await asyncio.wait([
                self.async_group.spawn(self._notify_events, conn, conn_events,
                                       True)
                for conn, conn_events in conn_conn_events])

        else:
            for conn, conn_events in conn_conn_events:
                await self._notify_events(conn, conn_events, False)

    async def _connection_loop(self, conn):
        # per-connection task: init handshake followed by request dispatch
        mlog.debug("starting connection loop")
        conn_id = next(self._next_conn_ids)
        info = None

        try:
            # first message must be the init request
            req, req_type, req_data = await common.receive_msg(conn)
            if req_type != 'HatEventer.MsgInitReq':
                raise Exception('invalid init request type')

            try:
                info = ConnectionInfo(
                    id=conn_id,
                    client_name=req_data['clientName'],
                    client_token=_optional_from_sbs(req_data['clientToken']),
                    subscription=common.create_subscription(
                        tuple(i) for i in req_data['subscriptions']),
                    server_id=_optional_from_sbs(req_data['serverId']),
                    persisted=req_data['persisted'])

                if self._connected_cb:
                    await aio.call(self._connected_cb, info)

                res_data = 'success', common.status_to_sbs(self._status)
                self._conn_infos[conn] = info

            except Exception as e:
                # init failure (bad request data or connected_cb raised)
                # is reported to the client instead of dropping silently
                info = None
                res_data = 'error', str(e)

            mlog.debug("sending init response %s", res_data[0])
            await common.send_msg(conn, 'HatEventer.MsgInitRes', res_data,
                                  conv=req.conv)

            if res_data[0] != 'success':
                # give the rejected client a short window to close first
                with contextlib.suppress(asyncio.TimeoutError):
                    await aio.wait_for(conn.wait_closing(),
                                       self._close_timeout)
                return

            while True:
                mlog.debug("waiting for incomming messages")
                msg, msg_type, msg_data = await common.receive_msg(conn)

                if msg_type == 'HatEventer.MsgEventsAck':
                    mlog.debug("received events ack")
                    # resolve the future awaited by _notify_events
                    future = self._conn_conv_futures.get((conn, msg.conv))
                    if future and not future.done():
                        future.set_result(None)

                elif msg_type == 'HatEventer.MsgRegisterReq':
                    mlog.debug("received register request")
                    await self._process_msg_register(conn, info, msg, msg_data)

                elif msg_type == 'HatEventer.MsgQueryReq':
                    mlog.debug("received query request")
                    await self._process_msg_query(conn, info, msg, msg_data)

                else:
                    raise Exception('unsupported message type')

        except ConnectionError:
            pass

        except Exception as e:
            mlog.error("on connection error: %s", e, exc_info=e)

        finally:
            mlog.debug("stopping connection loop")
            conn.close()
            self._conn_infos.pop(conn, None)

            # NOTE(review): this fails pending ack futures of ALL
            # connections, not only the one being torn down - the keys are
            # (conn, conv) tuples; verify whether failing only this
            # connection's futures was intended
            for future in self._conn_conv_futures.values():
                if not future.done():
                    future.set_exception(ConnectionError())

            if self._disconnected_cb and info:
                with contextlib.suppress(Exception):
                    await aio.call(self._disconnected_cb, info)

    async def _process_msg_register(self, conn, info, req, req_data):
        register_events = [common.register_event_from_sbs(i)
                           for i in req_data]

        if self._register_cb:
            events = await aio.call(self._register_cb, info, register_events)

        else:
            # no register callback configured - registration fails
            events = None

        # req.last means the client did not request a response
        if req.last:
            return

        if events is not None:
            res_data = 'events', [common.event_to_sbs(event)
                                  for event in events]

        else:
            res_data = 'failure', None

        await common.send_msg(conn, 'HatEventer.MsgRegisterRes', res_data,
                              conv=req.conv)

    async def _process_msg_query(self, conn, info, req, req_data):
        params = common.query_params_from_sbs(req_data)

        if self._query_cb:
            result = await aio.call(self._query_cb, info, params)

        else:
            # no query callback configured - reply with empty result
            result = common.QueryResult(events=[],
                                        more_follows=False)

        res_data = common.query_result_to_sbs(result)
        await common.send_msg(conn, 'HatEventer.MsgQueryRes', res_data,
                              conv=req.conv)

    async def _notify_status(self, conn):
        # best-effort: per-connection failures are logged, not propagated
        try:
            msg_data = common.status_to_sbs(self._status)
            await common.send_msg(conn, 'HatEventer.MsgStatusNotify', msg_data)

        except ConnectionError:
            pass

        except Exception as e:
            mlog.error("notify status error: %s", e, exc_info=e)

    async def _notify_events(self, conn, events, with_ack):
        # best-effort: per-connection failures are logged, not propagated
        try:
            msg_data = [common.event_to_sbs(event) for event in events]
            conv = await common.send_msg(conn,
                                         'HatEventer.MsgEventsNotify',
                                         msg_data,
                                         last=not with_ack)

            if not with_ack:
                return

            # wait until the connection loop receives MsgEventsAck
            future = self._loop.create_future()
            self._conn_conv_futures[(conn, conv)] = future

            try:
                await future

            finally:
                self._conn_conv_futures.pop((conn, conv))

        except ConnectionError:
            pass

        except Exception as e:
            mlog.error("notify events error: %s", e, exc_info=e)
|
|
283
|
+
|
|
284
|
+
|
|
285
|
+
def _optional_from_sbs(data):
|
|
286
|
+
return data[1] if data[0] == 'value' else None
|
|
File without changes
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
from hat.event.common import * # NOQA
|
|
2
|
+
|
|
3
|
+
from hat import json
|
|
4
|
+
|
|
5
|
+
from hat.event.common import (Event,
|
|
6
|
+
QueryResult,
|
|
7
|
+
EventPayloadBinary,
|
|
8
|
+
EventPayloadJson)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def event_to_json(event: Event) -> json.Data:
    """Convert event to JSON-serializable data.

    Optional `source_timestamp` and `payload` map to ``None`` when absent.

    """
    source_timestamp = (_timestamp_to_json(event.source_timestamp)
                        if event.source_timestamp else None)
    payload = (_event_payload_to_json(event.payload)
               if event.payload else None)
    return {'id': _event_id_to_json(event.id),
            'type': list(event.type),
            'timestamp': _timestamp_to_json(event.timestamp),
            'source_timestamp': source_timestamp,
            'payload': payload}
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def query_result_to_json(result: QueryResult) -> json.Data:
    """Convert query result to JSON-serializable data."""
    events = [event_to_json(event) for event in result.events]
    return {'events': events,
            'more_follows': result.more_follows}
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _event_id_to_json(event_id):
|
|
28
|
+
return {'server': event_id.server,
|
|
29
|
+
'session': event_id.session,
|
|
30
|
+
'instance': event_id.instance}
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def _timestamp_to_json(timestamp):
|
|
34
|
+
return {'s': timestamp.s,
|
|
35
|
+
'us': timestamp.us}
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def _event_payload_to_json(payload):
    """Serialize event payload as JSON-serializable data.

    Binary payloads are hex-encoded together with their binary type;
    JSON payloads are passed through unchanged.

    Raises:
        ValueError: if `payload` is neither binary nor JSON payload

    """
    if isinstance(payload, EventPayloadBinary):
        return {'data_type': 'binary',
                'binary_type': payload.type,
                'data': bytes(payload.data).hex()}

    if isinstance(payload, EventPayloadJson):
        return {'data_type': 'json',
                'data': payload.data}

    # fixed typo in original error message ('unuspported')
    raise ValueError('unsupported payload type')
|