hat_event-0.9.27-cp310.cp311.cp312.cp313-abi3-win_amd64.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hat/event/__init__.py +1 -0
- hat/event/adminer/__init__.py +18 -0
- hat/event/adminer/client.py +124 -0
- hat/event/adminer/common.py +27 -0
- hat/event/adminer/server.py +111 -0
- hat/event/backends/__init__.py +0 -0
- hat/event/backends/dummy.py +49 -0
- hat/event/backends/lmdb/__init__.py +9 -0
- hat/event/backends/lmdb/backend.py +319 -0
- hat/event/backends/lmdb/common.py +277 -0
- hat/event/backends/lmdb/conditions.py +102 -0
- hat/event/backends/lmdb/convert/__init__.py +0 -0
- hat/event/backends/lmdb/convert/__main__.py +8 -0
- hat/event/backends/lmdb/convert/convert_v06_to_v07.py +213 -0
- hat/event/backends/lmdb/convert/convert_v07_to_v09.py +175 -0
- hat/event/backends/lmdb/convert/main.py +88 -0
- hat/event/backends/lmdb/convert/v06.py +216 -0
- hat/event/backends/lmdb/convert/v07.py +508 -0
- hat/event/backends/lmdb/convert/v09.py +50 -0
- hat/event/backends/lmdb/convert/version.py +63 -0
- hat/event/backends/lmdb/environment.py +100 -0
- hat/event/backends/lmdb/latestdb.py +116 -0
- hat/event/backends/lmdb/manager/__init__.py +0 -0
- hat/event/backends/lmdb/manager/__main__.py +8 -0
- hat/event/backends/lmdb/manager/common.py +45 -0
- hat/event/backends/lmdb/manager/copy.py +92 -0
- hat/event/backends/lmdb/manager/main.py +34 -0
- hat/event/backends/lmdb/manager/query.py +215 -0
- hat/event/backends/lmdb/refdb.py +234 -0
- hat/event/backends/lmdb/systemdb.py +102 -0
- hat/event/backends/lmdb/timeseriesdb.py +486 -0
- hat/event/backends/memory.py +178 -0
- hat/event/common/__init__.py +144 -0
- hat/event/common/backend.py +91 -0
- hat/event/common/collection/__init__.py +8 -0
- hat/event/common/collection/common.py +28 -0
- hat/event/common/collection/list.py +19 -0
- hat/event/common/collection/tree.py +62 -0
- hat/event/common/common.py +176 -0
- hat/event/common/encoder.py +305 -0
- hat/event/common/json_schema_repo.json +1 -0
- hat/event/common/matches.py +44 -0
- hat/event/common/module.py +142 -0
- hat/event/common/sbs_repo.json +1 -0
- hat/event/common/subscription/__init__.py +22 -0
- hat/event/common/subscription/_csubscription.abi3.pyd +0 -0
- hat/event/common/subscription/common.py +145 -0
- hat/event/common/subscription/csubscription.py +47 -0
- hat/event/common/subscription/pysubscription.py +97 -0
- hat/event/component.py +284 -0
- hat/event/eventer/__init__.py +28 -0
- hat/event/eventer/client.py +260 -0
- hat/event/eventer/common.py +27 -0
- hat/event/eventer/server.py +286 -0
- hat/event/manager/__init__.py +0 -0
- hat/event/manager/__main__.py +8 -0
- hat/event/manager/common.py +48 -0
- hat/event/manager/main.py +387 -0
- hat/event/server/__init__.py +0 -0
- hat/event/server/__main__.py +8 -0
- hat/event/server/adminer_server.py +43 -0
- hat/event/server/engine.py +216 -0
- hat/event/server/engine_runner.py +127 -0
- hat/event/server/eventer_client.py +205 -0
- hat/event/server/eventer_client_runner.py +152 -0
- hat/event/server/eventer_server.py +119 -0
- hat/event/server/main.py +84 -0
- hat/event/server/main_runner.py +212 -0
- hat_event-0.9.27.dist-info/LICENSE +202 -0
- hat_event-0.9.27.dist-info/METADATA +108 -0
- hat_event-0.9.27.dist-info/RECORD +73 -0
- hat_event-0.9.27.dist-info/WHEEL +7 -0
- hat_event-0.9.27.dist-info/entry_points.txt +5 -0
hat/event/backends/lmdb/convert/convert_v07_to_v09.py
@@ -0,0 +1,175 @@

from pathlib import Path

from hat.event.backends.lmdb.convert import v07
from hat.event.backends.lmdb.convert import v09


def convert(src_path: Path,
            dst_path: Path):
    with v07.create_env(src_path) as src_env:
        with v09.create_env(dst_path) as dst_env:
            src_dbs = {db_type: v07.open_db(src_env, db_type)
                       for db_type in v07.DbType}
            dst_dbs = {db_type: v09.open_db(dst_env, db_type)
                       for db_type in v09.DbType}

            with src_env.begin(buffers=True) as src_txn:
                with dst_env.begin(write=True) as dst_txn:
                    _convert_system_db(src_txn, src_dbs, dst_txn, dst_dbs)
                    _convert_ref_db(src_txn, src_dbs, dst_txn, dst_dbs)
                    _convert_latest_db(src_txn, src_dbs, dst_txn, dst_dbs)
                    _convert_timeseries_db(src_txn, src_dbs, dst_txn, dst_dbs)


def _convert_system_db(src_txn, src_dbs, dst_txn, dst_dbs):
    v09.write(dst_dbs, dst_txn, v09.DbType.SYSTEM_SETTINGS,
              v09.SettingsId.VERSION, v09.version)

    with src_txn.cursor(db=src_dbs[v07.DbType.SYSTEM]) as src_cursor:
        for src_key, src_value in src_cursor:
            server_id = v07.decode_system_db_key(src_key)
            src_event_id, src_timestamp = v07.decode_system_db_value(src_value)

            dst_event_id = _convert_event_id(src_event_id)
            dst_timestamp = _convert_timestamp(src_timestamp)

            v09.write(dst_dbs, dst_txn, v09.DbType.SYSTEM_LAST_EVENT_ID,
                      server_id, dst_event_id)

            v09.write(dst_dbs, dst_txn, v09.DbType.SYSTEM_LAST_TIMESTAMP,
                      server_id, dst_timestamp)


def _convert_ref_db(src_txn, src_dbs, dst_txn, dst_dbs):
    with src_txn.cursor(db=src_dbs[v07.DbType.REF]) as src_cursor:
        for src_key, src_value in src_cursor:
            src_event_id = v07.decode_ref_db_key(src_key)
            src_event_refs = v07.decode_ref_db_value(src_value)

            dst_event_id = _convert_event_id(src_event_id)
            dst_event_refs = {_convert_event_ref(src_event_ref)
                              for src_event_ref in src_event_refs}

            v09.write(dst_dbs, dst_txn, v09.DbType.REF,
                      dst_event_id, dst_event_refs)


def _convert_latest_db(src_txn, src_dbs, dst_txn, dst_dbs):
    with src_txn.cursor(db=src_dbs[v07.DbType.LATEST_DATA]) as src_cursor:
        for src_key, src_value in src_cursor:
            event_type_ref = v07.decode_latest_data_db_key(src_key)
            src_event = v07.decode_latest_data_db_value(src_value)

            dst_event = _convert_event(src_event)

            v09.write(dst_dbs, dst_txn, v09.DbType.LATEST_DATA,
                      event_type_ref, dst_event)

    with src_txn.cursor(db=src_dbs[v07.DbType.LATEST_TYPE]) as src_cursor:
        for src_key, src_value in src_cursor:
            event_type_ref = v07.decode_latest_type_db_key(src_key)
            event_type = v07.decode_latest_type_db_value(src_value)

            v09.write(dst_dbs, dst_txn, v09.DbType.LATEST_TYPE,
                      event_type_ref, event_type)


def _convert_timeseries_db(src_txn, src_dbs, dst_txn, dst_dbs):
    with src_txn.cursor(db=src_dbs[v07.DbType.ORDERED_DATA]) as src_cursor:
        for src_key, src_value in src_cursor:
            partition_id, src_timestamp, src_event_id = \
                v07.decode_ordered_data_db_key(src_key)
            src_event = v07.decode_ordered_data_db_value(src_value)

            dst_timestamp = _convert_timestamp(src_timestamp)
            dst_event_id = _convert_event_id(src_event_id)
            dst_event = _convert_event(src_event)

            v09.write(dst_dbs, dst_txn, v09.DbType.TIMESERIES_DATA,
                      (partition_id, dst_timestamp, dst_event_id), dst_event)

    with src_txn.cursor(db=src_dbs[v07.DbType.ORDERED_PARTITION]) as src_cursor:  # NOQA
        for src_key, src_value in src_cursor:
            partition_id = v07.decode_ordered_partition_db_key(src_key)
            src_partition_data = v07.decode_ordered_partition_db_value(
                src_value)

            dst_partition_data = _convert_partition_data(src_partition_data)

            v09.write(dst_dbs, dst_txn, v09.DbType.TIMESERIES_PARTITION,
                      partition_id, dst_partition_data)

    with src_txn.cursor(db=src_dbs[v07.DbType.ORDERED_COUNT]) as src_cursor:
        for src_key, src_value in src_cursor:
            partition_id = v07.decode_ordered_count_db_key(src_key)
            count = v07.decode_ordered_count_db_value(src_value)

            v09.write(dst_dbs, dst_txn, v09.DbType.TIMESERIES_COUNT,
                      partition_id, count)


def _convert_event_id(src_event_id):
    return v09.EventId(*src_event_id)


def _convert_timestamp(src_timestamp):
    return v09.Timestamp(*src_timestamp)


def _convert_event_ref(src_event_ref):
    if isinstance(src_event_ref, v07.LatestEventRef):
        return v09.LatestEventRef(src_event_ref.key)

    if isinstance(src_event_ref, v07.OrderedEventRef):
        partition_id, src_timestamp, src_event_id = src_event_ref.key

        dst_timestamp = _convert_timestamp(src_timestamp)
        dst_event_id = _convert_event_id(src_event_id)

        dst_key = partition_id, dst_timestamp, dst_event_id
        return v09.TimeseriesEventRef(dst_key)

    raise ValueError('unsupported event reference')


def _convert_event(src_event):
    return v09.Event(
        id=_convert_event_id(src_event.event_id),
        type=src_event.event_type,
        timestamp=_convert_timestamp(src_event.timestamp),
        source_timestamp=(_convert_timestamp(src_event.source_timestamp)
                          if src_event.source_timestamp else None),
        payload=_convert_payload(src_event.payload))


def _convert_payload(src_payload):
    if src_payload is None:
        return

    if src_payload.type == v07.EventPayloadType.BINARY:
        return v09.EventPayloadBinary(type='',
                                      data=src_payload.data)

    if src_payload.type == v07.EventPayloadType.JSON:
        return v09.EventPayloadJson(data=src_payload.data)

    if src_payload.type == v07.EventPayloadType.SBS:
        binary_type = (f'{src_payload.data.module}.{src_payload.data.type}'
                       if src_payload.data.module is not None else
                       src_payload.data.type)
        return v09.EventPayloadBinary(type=binary_type,
                                      data=src_payload.data.data)

    raise ValueError('unsupported payload type')


def _convert_partition_data(src_data):
    src_order_by = v07.OrderBy(src_data['order'])
    dst_order_by = _convert_order_by(src_order_by)

    return {'order': dst_order_by.value,
            'subscriptions': src_data['subscriptions']}


def _convert_order_by(src_order_by):
    return v09.OrderBy[src_order_by.name]
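
The converter above is a plain function of two filesystem paths, so a v07 database can be migrated programmatically as well as through the CLI wrapper in main.py below. A minimal sketch, with hypothetical database paths:

from pathlib import Path

from hat.event.backends.lmdb.convert import convert_v07_to_v09

# hypothetical paths; main.py additionally checks that the source exists
# and that the destination does not
src_path = Path('event_v07.db')
dst_path = Path('event_v09.db')

# copies the system, ref, latest and ordered databases into their v09
# counterparts within a single write transaction on the destination
convert_v07_to_v09.convert(src_path, dst_path)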
hat/event/backends/lmdb/convert/main.py
@@ -0,0 +1,88 @@

from pathlib import Path
import sys
import typing

from hat import util

from hat.event.backends.lmdb.convert import convert_v06_to_v07
from hat.event.backends.lmdb.convert import convert_v07_to_v09
from hat.event.backends.lmdb.convert.version import Version, get_version


class Conversion(typing.NamedTuple):
    src_version: Version
    dst_version: Version
    convert: typing.Callable[[Path, Path], None]


target_version = Version.v09

conversions = [Conversion(src_version=Version.v06,
                          dst_version=Version.v07,
                          convert=convert_v06_to_v07.convert),
               Conversion(src_version=Version.v07,
                          dst_version=Version.v09,
                          convert=convert_v07_to_v09.convert)]


def main():
    if len(sys.argv) != 3:
        print(f"Usage: {sys.argv[0]} SRC_DB_PATH DST_DB_PATH", file=sys.stderr)
        sys.exit(1)

    src_path = Path(sys.argv[1])
    dst_path = Path(sys.argv[2])

    if not src_path.exists():
        print(f"invalid SRC_DB_PATH: {src_path}", file=sys.stderr)
        sys.exit(1)

    if dst_path.exists():
        print(f"existing DST_DB_PATH: {dst_path}", file=sys.stderr)
        sys.exit(1)

    try:
        convert(src_path, dst_path)

    except Exception as e:
        print(f"conversion error: {e}", file=sys.stderr)
        sys.exit(1)


def convert(src_path: Path,
            dst_path: Path):
    version = get_version(src_path)

    if version == target_version:
        raise Exception(f"{src_path} already up to date")

    tmp_src_path = src_path

    while version != target_version:
        conversion = util.first(conversions,
                                lambda i: i.src_version == version)
        if not conversion:
            raise Exception(f"unsupported version {version.value}")

        if conversion.dst_version == target_version:
            tmp_dst_path = dst_path

        else:
            tmp_dst_path = dst_path.with_suffix(
                f"{dst_path.suffix}.{conversion.dst_version.name}")

        if tmp_dst_path.exists():
            tmp_dst_path.unlink()

        conversion.convert(tmp_src_path, tmp_dst_path)
        version = conversion.dst_version

        if tmp_src_path != src_path:
            tmp_src_path.unlink()

        tmp_src_path = tmp_dst_path


if __name__ == '__main__':
    sys.argv[0] = 'hat-event-lmdb-convert'
    main()
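
The driver above chains single-step conversions until target_version is reached, writing each intermediate database next to the destination and unlinking it once the next step has consumed it; the __main__ guard's rewrite of sys.argv[0] suggests the module is also installed as a hat-event-lmdb-convert console script. A small sketch of the intermediate naming rule used by convert (paths hypothetical):

from pathlib import Path

# hypothetical destination; a v06 source is converted to v07 first, and the
# intermediate file takes the destination suffix plus the version name
dst_path = Path('event.db')
tmp_dst_path = dst_path.with_suffix(f"{dst_path.suffix}.v07")

print(tmp_dst_path)  # event.db.v07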
hat/event/backends/lmdb/convert/v06.py
@@ -0,0 +1,216 @@

from pathlib import Path
import enum
import struct
import platform
import typing

import lmdb

from hat import json
from hat import sbs


EventType: typing.TypeAlias = typing.Tuple[str, ...]


EventPayloadType = enum.Enum('EventPayloadType', [
    'BINARY',
    'JSON',
    'SBS'])


class EventId(typing.NamedTuple):
    server: int
    instance: int


class SbsData(typing.NamedTuple):
    module: str | None
    type: str
    data: bytes


class EventPayload(typing.NamedTuple):
    type: EventPayloadType
    data: bytes | json.Data | SbsData


class Timestamp(typing.NamedTuple):
    s: int
    us: int


class Event(typing.NamedTuple):
    event_id: EventId
    event_type: EventType
    timestamp: Timestamp
    source_timestamp: Timestamp | None
    payload: EventPayload | None


def decode_uint(x: bytes) -> int:
    return struct.unpack(">Q", x)[0]


def decode_timestamp(x: bytes) -> Timestamp:
    res = struct.unpack(">QI", x)
    return Timestamp(res[0] - (1 << 63), res[1])


def decode_tuple_str(x: bytes) -> typing.Tuple[str, ...]:
    return tuple(json.decode(str(x, encoding='utf-8')))


def decode_json(x: bytes) -> json.Data:
    return json.decode(str(x, encoding='utf-8'))


def decode_uint_timestamp_uint(x: bytes
                               ) -> tuple[int, Timestamp, int]:
    res = struct.unpack(">QQIQ", x)
    return res[0], Timestamp(res[1] - (1 << 63), res[2]), res[3]


def decode_event(event_bytes: bytes) -> Event:
    event_sbs = _sbs_repo.decode('HatEvent.Event', event_bytes)
    return _event_from_sbs(event_sbs)


def create_env(path: Path):
    max_dbs = 5
    max_db_size = (512 * 1024 * 1024 * 1024
                   if platform.architecture()[0] == '64bit'
                   else 1024 * 1024 * 1024)
    return lmdb.Environment(str(path),
                            map_size=max_db_size,
                            subdir=False,
                            max_dbs=max_dbs)


_sbs_repo = sbs.Repository(r"""
module HatEvent

MsgSubscribe = Array(EventType)

MsgNotify = Array(Event)

MsgRegisterReq = Array(RegisterEvent)

MsgRegisterRes = Array(Choice {
    event: Event
    failure: None
})

MsgQueryReq = QueryData

MsgQueryRes = Array(Event)

Timestamp = Record {
    s: Integer
    us: Integer
}

EventId = Record {
    server: Integer
    instance: Integer
}

Order = Choice {
    descending: None
    ascending: None
}

OrderBy = Choice {
    timestamp: None
    sourceTimestamp: None
}

EventType = Array(String)

EventPayload = Choice {
    binary: Bytes
    json: String
    sbs: Record {
        module: Optional(String)
        type: String
        data: Bytes
    }
}

Event = Record {
    id: EventId
    type: EventType
    timestamp: Timestamp
    sourceTimestamp: Optional(Timestamp)
    payload: Optional(EventPayload)
}

RegisterEvent = Record {
    type: EventType
    sourceTimestamp: Optional(Timestamp)
    payload: Optional(EventPayload)
}

QueryData = Record {
    ids: Optional(Array(EventId))
    types: Optional(Array(EventType))
    tFrom: Optional(Timestamp)
    tTo: Optional(Timestamp)
    sourceTFrom: Optional(Timestamp)
    sourceTTo: Optional(Timestamp)
    payload: Optional(EventPayload)
    order: Order
    orderBy: OrderBy
    uniqueType: Boolean
    maxResults: Optional(Integer)
}
""")


def _event_from_sbs(data: sbs.Data) -> Event:
    return Event(
        event_id=_event_id_from_sbs(data['id']),
        event_type=tuple(data['type']),
        timestamp=_timestamp_from_sbs(data['timestamp']),
        source_timestamp=_optional_from_sbs(data['sourceTimestamp'],
                                            _timestamp_from_sbs),
        payload=_optional_from_sbs(data['payload'], _event_payload_from_sbs))


def _event_payload_from_sbs(data: sbs.Data) -> EventPayload:
    data_type, data_data = data

    if data_type == 'binary':
        return EventPayload(type=EventPayloadType.BINARY,
                            data=data_data)

    if data_type == 'json':
        return EventPayload(type=EventPayloadType.JSON,
                            data=json.decode(data_data))

    if data_type == 'sbs':
        return EventPayload(type=EventPayloadType.SBS,
                            data=_sbs_data_from_sbs(data_data))

    raise ValueError('unsupported payload type')


def _timestamp_from_sbs(data: sbs.Data) -> Timestamp:
    return Timestamp(s=data['s'], us=data['us'])


def _event_id_from_sbs(data: sbs.Data) -> EventId:
    return EventId(server=data['server'],
                   instance=data['instance'])


def _sbs_data_from_sbs(data: sbs.Data) -> SbsData:
    return SbsData(module=_optional_from_sbs(data['module']),
                   type=data['type'],
                   data=data['data'])


def _optional_from_sbs(data: sbs.Data,
                       fn=lambda i: i
                       ) -> typing.Any | None:
    return fn(data[1]) if data[0] == 'value' else None
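
The decode helpers above read fixed-width big-endian keys, with seconds biased by 2**63 so that signed timestamps compare correctly under LMDB's byte-wise key ordering. A round-trip sketch; encode_timestamp is hypothetical, written as the inverse of v06's decode_timestamp:

import struct
import typing


class Timestamp(typing.NamedTuple):
    s: int
    us: int


def encode_timestamp(t: Timestamp) -> bytes:
    # hypothetical inverse of decode_timestamp: bias signed seconds into
    # an unsigned 8-byte integer, then append 4 bytes of microseconds
    return struct.pack(">QI", t.s + (1 << 63), t.us)


def decode_timestamp(x: bytes) -> Timestamp:
    res = struct.unpack(">QI", x)
    return Timestamp(res[0] - (1 << 63), res[1])


t = Timestamp(s=-1, us=999999)  # an instant just before the Unix epoch
assert decode_timestamp(encode_timestamp(t)) == t

# the bias preserves ordering: byte-wise comparison of encoded keys agrees
# with numeric comparison of the timestamps
assert encode_timestamp(Timestamp(-1, 0)) < encode_timestamp(Timestamp(0, 0))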