hat_event-0.9.27-cp310.cp311.cp312.cp313-abi3-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. hat/event/__init__.py +1 -0
  2. hat/event/adminer/__init__.py +18 -0
  3. hat/event/adminer/client.py +124 -0
  4. hat/event/adminer/common.py +27 -0
  5. hat/event/adminer/server.py +111 -0
  6. hat/event/backends/__init__.py +0 -0
  7. hat/event/backends/dummy.py +49 -0
  8. hat/event/backends/lmdb/__init__.py +9 -0
  9. hat/event/backends/lmdb/backend.py +319 -0
  10. hat/event/backends/lmdb/common.py +277 -0
  11. hat/event/backends/lmdb/conditions.py +102 -0
  12. hat/event/backends/lmdb/convert/__init__.py +0 -0
  13. hat/event/backends/lmdb/convert/__main__.py +8 -0
  14. hat/event/backends/lmdb/convert/convert_v06_to_v07.py +213 -0
  15. hat/event/backends/lmdb/convert/convert_v07_to_v09.py +175 -0
  16. hat/event/backends/lmdb/convert/main.py +88 -0
  17. hat/event/backends/lmdb/convert/v06.py +216 -0
  18. hat/event/backends/lmdb/convert/v07.py +508 -0
  19. hat/event/backends/lmdb/convert/v09.py +50 -0
  20. hat/event/backends/lmdb/convert/version.py +63 -0
  21. hat/event/backends/lmdb/environment.py +100 -0
  22. hat/event/backends/lmdb/latestdb.py +116 -0
  23. hat/event/backends/lmdb/manager/__init__.py +0 -0
  24. hat/event/backends/lmdb/manager/__main__.py +8 -0
  25. hat/event/backends/lmdb/manager/common.py +45 -0
  26. hat/event/backends/lmdb/manager/copy.py +92 -0
  27. hat/event/backends/lmdb/manager/main.py +34 -0
  28. hat/event/backends/lmdb/manager/query.py +215 -0
  29. hat/event/backends/lmdb/refdb.py +234 -0
  30. hat/event/backends/lmdb/systemdb.py +102 -0
  31. hat/event/backends/lmdb/timeseriesdb.py +486 -0
  32. hat/event/backends/memory.py +178 -0
  33. hat/event/common/__init__.py +144 -0
  34. hat/event/common/backend.py +91 -0
  35. hat/event/common/collection/__init__.py +8 -0
  36. hat/event/common/collection/common.py +28 -0
  37. hat/event/common/collection/list.py +19 -0
  38. hat/event/common/collection/tree.py +62 -0
  39. hat/event/common/common.py +176 -0
  40. hat/event/common/encoder.py +305 -0
  41. hat/event/common/json_schema_repo.json +1 -0
  42. hat/event/common/matches.py +44 -0
  43. hat/event/common/module.py +142 -0
  44. hat/event/common/sbs_repo.json +1 -0
  45. hat/event/common/subscription/__init__.py +22 -0
  46. hat/event/common/subscription/_csubscription.abi3.pyd +0 -0
  47. hat/event/common/subscription/common.py +145 -0
  48. hat/event/common/subscription/csubscription.py +47 -0
  49. hat/event/common/subscription/pysubscription.py +97 -0
  50. hat/event/component.py +284 -0
  51. hat/event/eventer/__init__.py +28 -0
  52. hat/event/eventer/client.py +260 -0
  53. hat/event/eventer/common.py +27 -0
  54. hat/event/eventer/server.py +286 -0
  55. hat/event/manager/__init__.py +0 -0
  56. hat/event/manager/__main__.py +8 -0
  57. hat/event/manager/common.py +48 -0
  58. hat/event/manager/main.py +387 -0
  59. hat/event/server/__init__.py +0 -0
  60. hat/event/server/__main__.py +8 -0
  61. hat/event/server/adminer_server.py +43 -0
  62. hat/event/server/engine.py +216 -0
  63. hat/event/server/engine_runner.py +127 -0
  64. hat/event/server/eventer_client.py +205 -0
  65. hat/event/server/eventer_client_runner.py +152 -0
  66. hat/event/server/eventer_server.py +119 -0
  67. hat/event/server/main.py +84 -0
  68. hat/event/server/main_runner.py +212 -0
  69. hat_event-0.9.27.dist-info/LICENSE +202 -0
  70. hat_event-0.9.27.dist-info/METADATA +108 -0
  71. hat_event-0.9.27.dist-info/RECORD +73 -0
  72. hat_event-0.9.27.dist-info/WHEEL +7 -0
  73. hat_event-0.9.27.dist-info/entry_points.txt +5 -0
hat/event/manager/main.py
@@ -0,0 +1,387 @@
+ from collections.abc import Collection
+ from pathlib import Path
+ import argparse
+ import asyncio
+ import contextlib
+ import sys
+
+ from hat import aio
+ from hat import json
+ from hat.drivers import tcp
+
+ from hat.event import eventer
+ from hat.event.manager import common
+
+
+ def create_argument_parser() -> argparse.ArgumentParser:
+     parser = argparse.ArgumentParser()
+     parser.add_argument(
+         '--host', type=str, default='127.0.0.1',
+         help="server host name (default '127.0.0.1')")
+     parser.add_argument(
+         '--port', type=int, default='23012',
+         help="server TCP port (default 23012)")
+     parser.add_argument(
+         '--client-name', metavar='NAME', type=str, default='manager',
+         help="client name (default 'manager')")
+     parser.add_argument(
+         '--client-token', metavar='TOKEN', type=str, default=None,
+         help="client token")
+     subparsers = parser.add_subparsers(
+         title='actions', dest='action', required=True)
+
+     register_parser = subparsers.add_parser(
+         'register', description="register new event")
+     register_parser.add_argument(
+         '--source-timestamp', metavar='TIMESTAMP', type=_parse_timestamp,
+         default=None,
+         help="source timestamp")
+     register_parser.add_argument(
+         '--payload-type', metavar='TYPE',
+         choices=['json', 'yaml', 'toml', 'binary', 'none'],
+         default=None,
+         help="payload type")
+     register_parser.add_argument(
+         '--binary-type', metavar='TYPE', type=str, default='',
+         help="binary payload type (default '')")
+     register_parser.add_argument(
+         '--payload-path', metavar='PATH', type=Path, default=Path('-'),
+         help="payload file path or '-' for stdin (default '-')")
+     register_parser.add_argument(
+         'event_type', metavar='EVENT_TYPE', type=_parse_event_type,
+         help="event type where segments are delimited by '/'")
+
+     query_parser = subparsers.add_parser(
+         'query', description="query events")
+     query_subparsers = query_parser.add_subparsers(
+         title='query types', dest='query_type', required=True)
+
+     query_latest_parser = query_subparsers.add_parser(
+         'latest', description="query latest events")
+     query_latest_parser.add_argument(
+         '--event-types', metavar='EVENT_TYPE', type=_parse_event_type,
+         default=None, nargs='*',
+         help='query event types')
+
+     query_timeseries_parser = query_subparsers.add_parser(
+         'timeseries', description="query timeseries events")
+     query_timeseries_parser.add_argument(
+         '--event-types', metavar='EVENT_TYPE', type=_parse_event_type,
+         default=None, nargs='*',
+         help='query event types')
+     query_timeseries_parser.add_argument(
+         '--t-from', metavar='TIMESTAMP', type=_parse_timestamp, default=None,
+         help="from timestamp")
+     query_timeseries_parser.add_argument(
+         '--t-to', metavar='TIMESTAMP', type=_parse_timestamp, default=None,
+         help="to timestamp")
+     query_timeseries_parser.add_argument(
+         '--source-t-from', metavar='TIMESTAMP', type=_parse_timestamp,
+         default=None,
+         help="from source timestamp")
+     query_timeseries_parser.add_argument(
+         '--source-t-to', metavar='TIMESTAMP', type=_parse_timestamp,
+         default=None,
+         help="to source timestamp")
+     query_timeseries_parser.add_argument(
+         '--order', type=_parse_order, choices=[i.name for i in common.Order],
+         default=common.Order.DESCENDING,
+         help="order (default 'DESCENDING')")
+     query_timeseries_parser.add_argument(
+         '--order-by', type=_parse_order_by,
+         choices=[i.name for i in common.OrderBy],
+         default=common.OrderBy.TIMESTAMP,
+         help="order (default 'TIMESTAMP')")
+     query_timeseries_parser.add_argument(
+         '--max-results', metavar='N', type=int, default=None,
+         help="maximum number of results")
+     query_timeseries_parser.add_argument(
+         '--last-event-id', metavar='SERVER_ID,SESSION_ID,INSTANCE_ID',
+         type=_parse_event_id, default=None,
+         help="last event id")
+
+     query_server_parser = query_subparsers.add_parser(
+         'server', description="query server events")
+     query_server_parser.add_argument(
+         '--persisted', action='store_true',
+         help="persisted events")
+     query_server_parser.add_argument(
+         '--max-results', metavar='N', type=int, default=None,
+         help="maximum number of results")
+     query_server_parser.add_argument(
+         '--last-event-id', metavar='SERVER_ID,SESSION_ID,INSTANCE_ID',
+         type=_parse_event_id, default=None,
+         help="last event id")
+     query_server_parser.add_argument(
+         'server_id', metavar='SERVER_ID', type=int,
+         help="server id")
+
+     subscribe_parser = subparsers.add_parser(
+         'subscribe', description="watch newly registered events")
+     subscribe_parser.add_argument(
+         '--server-id', type=int, default=None,
+         help="server id")
+     subscribe_parser.add_argument(
+         '--persisted', action='store_true',
+         help="persisted events")
+     subscribe_parser.add_argument(
+         'event_types', metavar='EVENT_TYPE', type=_parse_event_type, nargs='*',
+         help='query event type')
+
+     server_parser = subparsers.add_parser(
+         'server', description="run manager server with web ui")
+     server_parser.add_argument(
+         '--server-id', type=int, default=None,
+         help="server id")
+     server_parser.add_argument(
+         '--persisted', action='store_true',
+         help="persisted events")
+     server_parser.add_argument(
+         'event_types', metavar='EVENT_TYPE', type=_parse_event_type, nargs='*',
+         help='query event type')
+
+     return parser
+
+
+ def main():
+     parser = create_argument_parser()
+     args = parser.parse_args()
+
+     addr = tcp.Address(args.host, args.port)
+
+     if args.action == 'register':
+         register_event = common.RegisterEvent(
+             type=args.event_type,
+             source_timestamp=args.source_timestamp,
+             payload=_read_payload(payload_type=args.payload_type,
+                                   binary_type=args.binary_type,
+                                   path=args.payload_path))
+
+         co = register(addr=addr,
+                       client_name=args.client_name,
+                       client_token=args.client_token,
+                       register_event=register_event)
+
+     elif args.action == 'query':
+         if args.query_type == 'latest':
+             params = common.QueryLatestParams(
+                 event_types=args.event_types)
+
+         elif args.query_type == 'timeseries':
+             params = common.QueryTimeseriesParams(
+                 event_types=args.event_types,
+                 t_from=args.t_from,
+                 t_to=args.t_to,
+                 source_t_from=args.source_t_from,
+                 source_t_to=args.source_t_to,
+                 order=args.order,
+                 order_by=args.order_by,
+                 max_results=args.max_results,
+                 last_event_id=args.last_event_id)
+
+         elif args.query_type == 'server':
+             params = common.QueryServerParams(
+                 server_id=args.server_id,
+                 persisted=args.persisted,
+                 max_results=args.max_results,
+                 last_event_id=args.last_event_id)
+
+         else:
+             raise ValueError('unsupported query type')
+
+         co = query(addr=addr,
+                    client_name=args.client_name,
+                    client_token=args.client_token,
+                    params=params)
+
+     elif args.action == 'subscribe':
+         subscriptions = args.event_types or [('*', )]
+
+         co = subscribe(addr=addr,
+                        client_name=args.client_name,
+                        client_token=args.client_token,
+                        subscriptions=subscriptions,
+                        server_id=args.server_id,
+                        persisted=args.persisted)
+
+     elif args.action == 'server':
+         subscriptions = args.event_types or [('*', )]
+
+         co = server(addr=addr,
+                     client_name=args.client_name,
+                     client_token=args.client_token,
+                     subscriptions=subscriptions,
+                     server_id=args.server_id,
+                     persisted=args.persisted)
+
+     else:
+         raise ValueError('unsupported action')
+
+     aio.init_asyncio()
+     with contextlib.suppress(asyncio.CancelledError):
+         return aio.run_asyncio(co)
+
+
+ async def register(addr: tcp.Address,
+                    client_name: str,
+                    client_token: str | None,
+                    register_event: common.RegisterEvent):
+     client = await eventer.connect(addr=addr,
+                                    client_name=client_name,
+                                    client_token=client_token)
+
+     try:
+         result = await client.register([register_event])
+
+         if result is None:
+             return 1
+
+     finally:
+         await aio.uncancellable(client.async_close())
+
+
+ async def query(addr: tcp.Address,
+                 client_name: str,
+                 client_token: str | None,
+                 params: common.QueryParams):
+     client = await eventer.connect(addr=addr,
+                                    client_name=client_name,
+                                    client_token=client_token)
+
+     try:
+         result = await client.query(params)
+
+         result_json = common.query_result_to_json(result)
+         print(json.encode(result_json))
+
+     finally:
+         await aio.uncancellable(client.async_close())
+
+
+ async def subscribe(addr: tcp.Address,
+                     client_name: str,
+                     client_token: str | None,
+                     subscriptions: Collection[common.EventType],
+                     server_id: int | None,
+                     persisted: bool):
+
+     def on_events(client, events):
+         events_json = [common.event_to_json(event) for event in events]
+         print(json.encode(events_json))
+
+     client = await eventer.connect(addr=addr,
+                                    client_name=client_name,
+                                    client_token=client_token,
+                                    subscriptions=subscriptions,
+                                    server_id=server_id,
+                                    persisted=persisted,
+                                    events_cb=on_events)
+
+     try:
+         await client.wait_closing()
+
+     finally:
+         await aio.uncancellable(client.async_close())
+
+
+ async def server(addr: tcp.Address,
+                  client_name: str,
+                  client_token: str | None,
+                  subscriptions: Collection[common.EventType],
+                  server_id: int | None,
+                  persisted: bool):
+     raise NotImplementedError()
+
+
+ def _parse_timestamp(t):
+     if t == 'now':
+         return common.now()
+
+     return common.timestamp_from_float(float(t))
+
+
+ def _parse_event_type(event_type):
+     return tuple(event_type.split('/'))
+
+
+ def _parse_event_id(event_id):
+     return common.EventId(*(int(i) for i in event_id.split(',')))
+
+
+ def _parse_order(order):
+     return common.Order[order]
+
+
+ def _parse_order_by(order_by):
+     return common.OrderBy[order_by]
+
+
+ def _read_payload(payload_type, binary_type, path):
+     if payload_type == 'none':
+         return
+
+     if path == Path('-'):
+         if payload_type == 'json' or payload_type is None:
+             json_format = json.Format.JSON
+
+         elif payload_type == 'yaml':
+             json_format = json.Format.YAML
+
+         elif payload_type == 'toml':
+             json_format = json.Format.TOML
+
+         elif payload_type == 'binary':
+             json_format = None
+
+         else:
+             raise ValueError('unsupported payload type')
+
+         if json_format is None or json_format == json.Format.TOML:
+             stdin, sys.stdin = sys.stdin.detach(), None
+
+         else:
+             stdin = sys.stdin
+
+         if json_format:
+             data = json.decode_stream(stdin, json_format)
+             return common.EventPayloadJson(data)
+
+         else:
+             data = stdin.read()
+             return common.EventPayloadBinary(type=binary_type,
+                                              data=data)
+
+     if payload_type is None:
+         try:
+             json_format = json.get_file_format(path)
+
+         except ValueError:
+             json_format = None
+
+     elif payload_type == 'json':
+         json_format = json.Format.JSON
+
+     elif payload_type == 'yaml':
+         json_format = json.Format.YAML
+
+     elif payload_type == 'toml':
+         json_format = json.Format.TOML
+
+     elif payload_type == 'binary':
+         json_format = None
+
+     else:
+         raise ValueError('unsupported payload type')
+
+     if json_format:
+         data = json.decode_file(path, json_format)
+         return common.EventPayloadJson(data)
+
+     data = path.read_bytes()
+     return common.EventPayloadBinary(type=binary_type,
+                                      data=data)
+
+
+ if __name__ == '__main__':
+     sys.argv[0] = 'hat-event-manager'
+     sys.exit(main())
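
The coroutines in this module can also be driven directly from Python. The following is a minimal sketch, not part of the package: it reuses the parser defaults shown above (127.0.0.1, port 23012, client name 'manager') and assumes an event server is already listening there.

from hat import aio
from hat.drivers import tcp

from hat.event.manager import common
from hat.event.manager.main import query


if __name__ == '__main__':
    # query the latest events from a locally running server;
    # query() prints the result as encoded JSON
    aio.init_asyncio()
    aio.run_asyncio(query(addr=tcp.Address('127.0.0.1', 23012),
                          client_name='manager',
                          client_token=None,
                          params=common.QueryLatestParams(event_types=None)))
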
File without changes (hat/event/server/__init__.py)
hat/event/server/__main__.py
@@ -0,0 +1,8 @@
+ import sys
+
+ from hat.event.server.main import main
+
+
+ if __name__ == '__main__':
+     sys.argv[0] = 'hat-event-server'
+     sys.exit(main())
hat/event/server/adminer_server.py
@@ -0,0 +1,43 @@
+ import logging.config
+
+ from hat import aio
+ from hat import json
+ from hat.drivers import tcp
+
+ from hat.event import adminer
+
+
+ async def create_adminer_server(addr: tcp.Address,
+                                 log_conf: json.Data,
+                                 **kwargs
+                                 ) -> 'AdminerServer':
+     """Create adminer server"""
+     server = AdminerServer()
+     server._log_conf = log_conf
+
+     server._srv = await adminer.listen(addr,
+                                        get_log_conf_cb=server._on_get_log_conf,
+                                        set_log_conf_cb=server._on_set_log_conf,
+                                        **kwargs)
+
+     return server
+
+
+ class AdminerServer(aio.Resource):
+     """Adminer server
+
+     For creating new server see `create_adminer_server` coroutine.
+
+     """
+
+     @property
+     def async_group(self) -> aio.Group:
+         """Async group"""
+         return self._srv.async_group
+
+     def _on_get_log_conf(self):
+         return self._log_conf
+
+     def _on_set_log_conf(self, log_conf):
+         logging.config.dictConfig(log_conf)
+         self._log_conf = log_conf
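
A usage sketch for the helper above, with placeholder values: the port 23015 and the minimal dictConfig dictionary are illustrative only, while wait_closing/async_close come from aio.Resource as shown in the class.

from hat import aio
from hat.drivers import tcp

from hat.event.server.adminer_server import create_adminer_server


async def run():
    # placeholder address; real deployments take it from the server configuration
    srv = await create_adminer_server(
        addr=tcp.Address('127.0.0.1', 23015),
        log_conf={'version': 1,
                  'disable_existing_loggers': False})
    try:
        await srv.wait_closing()
    finally:
        await aio.uncancellable(srv.async_close())


if __name__ == '__main__':
    aio.init_asyncio()
    aio.run_asyncio(run())
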
hat/event/server/engine.py
@@ -0,0 +1,216 @@
+ """Engine"""
+
+ from collections.abc import Callable, Collection, Iterable
+ import asyncio
+ import collections
+ import logging
+
+ from hat import aio
+ from hat import json
+
+ from hat.event import common
+ import hat.event.server.eventer_server
+
+
+ mlog: logging.Logger = logging.getLogger(__name__)
+ """Module logger"""
+
+
+ async def create_engine(backend: common.Backend,
+                         eventer_server: hat.event.server.eventer_server.EventerServer,  # NOQA
+                         module_confs: Iterable[json.Data],
+                         server_id: int,
+                         restart_cb: Callable[[], None],
+                         reset_monitor_ready_cb: Callable[[], None],
+                         register_queue_size: int = 1024
+                         ) -> 'Engine':
+     """Create engine"""
+     engine = Engine()
+     engine._backend = backend
+     engine._eventer_server = eventer_server
+     engine._server_id = server_id
+     engine._restart_cb = restart_cb
+     engine._reset_monitor_ready_cb = reset_monitor_ready_cb
+     engine._loop = asyncio.get_running_loop()
+     engine._async_group = aio.Group()
+     engine._register_queue = aio.Queue(register_queue_size)
+     engine._source_modules = collections.deque()
+
+     engine._last_event_id = await backend.get_last_event_id(server_id)
+
+     future = engine._loop.create_future()
+     source = common.Source(type=common.SourceType.ENGINE, id=0)
+     events = [engine._create_status_reg_event('STARTED')]
+     engine._register_queue.put_nowait((future, source, events))
+
+     try:
+         for source_id, module_conf in enumerate(module_confs):
+             info = common.import_module_info(module_conf['module'])
+             source = common.Source(type=common.SourceType.MODULE,
+                                    id=source_id)
+
+             module = await engine.async_group.spawn(
+                 aio.call, info.create, module_conf, engine, source)
+             engine.async_group.spawn(aio.call_on_cancel, module.async_close)
+             engine.async_group.spawn(aio.call_on_done, module.wait_closing(),
+                                      engine.close)
+
+             engine._source_modules.append((source, module))
+
+         engine.async_group.spawn(engine._register_loop)
+
+     except BaseException:
+         await aio.uncancellable(engine.async_close())
+         raise
+
+     return engine
+
+
+ class Engine(common.Engine):
+
+     @property
+     def async_group(self) -> aio.Group:
+         """Async group"""
+         return self._async_group
+
+     @property
+     def server_id(self) -> int:
+         return self._server_id
+
+     async def register(self,
+                        source: common.Source,
+                        events: Collection[common.RegisterEvent]
+                        ) -> Collection[common.Event] | None:
+         if not events:
+             return []
+
+         future = self._loop.create_future()
+
+         try:
+             await self._register_queue.put((future, source, events))
+
+         except aio.QueueClosedError:
+             raise Exception('engine closed')
+
+         return await future
+
+     async def query(self,
+                     params: common.QueryParams
+                     ) -> common.QueryResult:
+         return await self._backend.query(params)
+
+     def get_client_names(self) -> Iterable[tuple[common.Source, str]]:
+         return self._eventer_server.get_client_names()
+
+     def restart(self):
+         self._restart_cb()
+
+     def reset_monitor_ready(self):
+         self._reset_monitor_ready_cb()
+
+     async def _register_loop(self):
+         future = None
+         mlog.debug("starting register loop")
+
+         try:
+             while True:
+                 mlog.debug("waiting for register requests")
+                 future, source, register_events = \
+                     await self._register_queue.get()
+
+                 mlog.debug("processing session")
+                 events = await self._process_sessions(source, register_events)
+
+                 mlog.debug("registering to backend")
+                 events = await self._backend.register(events)
+
+                 if future.done():
+                     continue
+
+                 result = (
+                     list(event for event, _ in zip(events, register_events))
+                     if events is not None else None)
+                 future.set_result(result)
+
+         except Exception as e:
+             mlog.error("register loop error: %s", e, exc_info=e)
+
+         finally:
+             mlog.debug("register loop closed")
+             self.close()
+             self._register_queue.close()
+
+             while True:
+                 if future and not future.done():
+                     future.set_exception(Exception('engine closed'))
+                 if self._register_queue.empty():
+                     break
+                 future, _, __ = self._register_queue.get_nowait()
+
+             timestamp = self._create_session()
+             status_reg_event = self._create_status_reg_event('STOPPED')
+             events = [self._create_event(timestamp, status_reg_event)]
+             await self._backend.register(events)
+
+     async def _process_sessions(self, source, register_events):
+         timestamp = self._create_session()
+
+         for _, module in self._source_modules:
+             await aio.call(module.on_session_start,
+                            self._last_event_id.session)
+
+         events = collections.deque(
+             self._create_event(timestamp, register_event)
+             for register_event in register_events)
+
+         input_source_events = [(source, event) for event in events]
+         while input_source_events:
+             output_source_events = collections.deque()
+
+             for output_source, module in self._source_modules:
+                 for input_source, input_event in input_source_events:
+                     if not module.subscription.matches(input_event.type):
+                         continue
+
+                     output_register_events = await aio.call(
+                         module.process, input_source, input_event)
+
+                     if not output_register_events:
+                         continue
+
+                     for output_register_event in output_register_events:
+                         output_event = self._create_event(
+                             timestamp, output_register_event)
+                         output_source_events.append(
+                             (output_source, output_event))
+                         events.append(output_event)
+
+             input_source_events = output_source_events
+
+         for _, module in self._source_modules:
+             await aio.call(module.on_session_stop, self._last_event_id.session)
+
+         return events
+
+     def _create_status_reg_event(self, status):
+         return common.RegisterEvent(
+             type=('event', str(self._server_id), 'engine'),
+             source_timestamp=None,
+             payload=common.EventPayloadJson(status))
+
+     def _create_session(self):
+         self._last_event_id = self._last_event_id._replace(
+             session=self._last_event_id.session + 1,
+             instance=0)
+
+         return common.now()
+
+     def _create_event(self, timestamp, register_event):
+         self._last_event_id = self._last_event_id._replace(
+             instance=self._last_event_id.instance + 1)
+
+         return common.Event(id=self._last_event_id,
+                             type=register_event.type,
+                             timestamp=timestamp,
+                             source_timestamp=register_event.source_timestamp,
+                             payload=register_event.payload)
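
For orientation, the engine above talks to modules only through the calls visible in this file: it awaits info.create(module_conf, engine, source), treats the result as an aio.Resource, checks module.subscription.matches(event.type), awaits module.process(source, event) and iterates the returned register events, and notifies on_session_start/on_session_stop. The sketch below is a hypothetical, duck-typed module built from just those calls; every name in it (ExampleModule, _PrefixSubscription, create, the ('example', ...) event types) is illustrative, and the real module contract is defined in hat/event/common/module.py and may be stricter.

from hat import aio

from hat.event import common


class _PrefixSubscription:
    """Stand-in for the subscription object whose matches() the engine calls."""

    def __init__(self, prefix):
        self._prefix = prefix

    def matches(self, event_type):
        return event_type[:len(self._prefix)] == self._prefix


class ExampleModule(aio.Resource):
    """Counts matching events and emits one counter event per input event."""

    def __init__(self):
        self._async_group = aio.Group()
        self._count = 0

    @property
    def async_group(self):
        return self._async_group

    @property
    def subscription(self):
        # match only ('example', 'input', ...) so the module's own output
        # is not fed back to it by the engine's processing loop
        return _PrefixSubscription(('example', 'input'))

    async def on_session_start(self, session_id):
        pass

    async def on_session_stop(self, session_id):
        pass

    async def process(self, source, event):
        # the engine iterates the returned register events and routes them
        # through all modules within the same session
        self._count += 1
        return [common.RegisterEvent(
            type=('example', 'count'),
            source_timestamp=None,
            payload=common.EventPayloadJson(self._count))]


async def create(conf, engine, source):
    """Factory the engine awaits via info.create; how module info is declared
    and resolved by common.import_module_info is defined in
    hat/event/common/module.py."""
    return ExampleModule()
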