hat-event 0.9.27 (hat_event-0.9.27-cp310.cp311.cp312.cp313-abi3-win_amd64.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hat/event/__init__.py +1 -0
- hat/event/adminer/__init__.py +18 -0
- hat/event/adminer/client.py +124 -0
- hat/event/adminer/common.py +27 -0
- hat/event/adminer/server.py +111 -0
- hat/event/backends/__init__.py +0 -0
- hat/event/backends/dummy.py +49 -0
- hat/event/backends/lmdb/__init__.py +9 -0
- hat/event/backends/lmdb/backend.py +319 -0
- hat/event/backends/lmdb/common.py +277 -0
- hat/event/backends/lmdb/conditions.py +102 -0
- hat/event/backends/lmdb/convert/__init__.py +0 -0
- hat/event/backends/lmdb/convert/__main__.py +8 -0
- hat/event/backends/lmdb/convert/convert_v06_to_v07.py +213 -0
- hat/event/backends/lmdb/convert/convert_v07_to_v09.py +175 -0
- hat/event/backends/lmdb/convert/main.py +88 -0
- hat/event/backends/lmdb/convert/v06.py +216 -0
- hat/event/backends/lmdb/convert/v07.py +508 -0
- hat/event/backends/lmdb/convert/v09.py +50 -0
- hat/event/backends/lmdb/convert/version.py +63 -0
- hat/event/backends/lmdb/environment.py +100 -0
- hat/event/backends/lmdb/latestdb.py +116 -0
- hat/event/backends/lmdb/manager/__init__.py +0 -0
- hat/event/backends/lmdb/manager/__main__.py +8 -0
- hat/event/backends/lmdb/manager/common.py +45 -0
- hat/event/backends/lmdb/manager/copy.py +92 -0
- hat/event/backends/lmdb/manager/main.py +34 -0
- hat/event/backends/lmdb/manager/query.py +215 -0
- hat/event/backends/lmdb/refdb.py +234 -0
- hat/event/backends/lmdb/systemdb.py +102 -0
- hat/event/backends/lmdb/timeseriesdb.py +486 -0
- hat/event/backends/memory.py +178 -0
- hat/event/common/__init__.py +144 -0
- hat/event/common/backend.py +91 -0
- hat/event/common/collection/__init__.py +8 -0
- hat/event/common/collection/common.py +28 -0
- hat/event/common/collection/list.py +19 -0
- hat/event/common/collection/tree.py +62 -0
- hat/event/common/common.py +176 -0
- hat/event/common/encoder.py +305 -0
- hat/event/common/json_schema_repo.json +1 -0
- hat/event/common/matches.py +44 -0
- hat/event/common/module.py +142 -0
- hat/event/common/sbs_repo.json +1 -0
- hat/event/common/subscription/__init__.py +22 -0
- hat/event/common/subscription/_csubscription.abi3.pyd +0 -0
- hat/event/common/subscription/common.py +145 -0
- hat/event/common/subscription/csubscription.py +47 -0
- hat/event/common/subscription/pysubscription.py +97 -0
- hat/event/component.py +284 -0
- hat/event/eventer/__init__.py +28 -0
- hat/event/eventer/client.py +260 -0
- hat/event/eventer/common.py +27 -0
- hat/event/eventer/server.py +286 -0
- hat/event/manager/__init__.py +0 -0
- hat/event/manager/__main__.py +8 -0
- hat/event/manager/common.py +48 -0
- hat/event/manager/main.py +387 -0
- hat/event/server/__init__.py +0 -0
- hat/event/server/__main__.py +8 -0
- hat/event/server/adminer_server.py +43 -0
- hat/event/server/engine.py +216 -0
- hat/event/server/engine_runner.py +127 -0
- hat/event/server/eventer_client.py +205 -0
- hat/event/server/eventer_client_runner.py +152 -0
- hat/event/server/eventer_server.py +119 -0
- hat/event/server/main.py +84 -0
- hat/event/server/main_runner.py +212 -0
- hat_event-0.9.27.dist-info/LICENSE +202 -0
- hat_event-0.9.27.dist-info/METADATA +108 -0
- hat_event-0.9.27.dist-info/RECORD +73 -0
- hat_event-0.9.27.dist-info/WHEEL +7 -0
- hat_event-0.9.27.dist-info/entry_points.txt +5 -0

hat/event/common/__init__.py
@@ -0,0 +1,144 @@
+"""Common functionality shared between clients and event server"""
+
+import time
+
+from hat.event.common.backend import (BackendClosedError,
+                                      Backend,
+                                      BackendConf,
+                                      BackendRegisteredEventsCb,
+                                      BackendFlushedEventsCb,
+                                      CreateBackend,
+                                      BackendInfo,
+                                      import_backend_info)
+from hat.event.common.collection import (EventTypeCollection,
+                                         ListEventTypeCollection,
+                                         TreeEventTypeCollection)
+from hat.event.common.common import (json_schema_repo,
+                                     sbs_repo,
+                                     ServerId,
+                                     SessionId,
+                                     InstanceId,
+                                     EventTypeSegment,
+                                     EventType,
+                                     Timestamp,
+                                     min_timestamp,
+                                     max_timestamp,
+                                     Status,
+                                     Order,
+                                     OrderBy,
+                                     EventId,
+                                     EventPayloadBinary,
+                                     EventPayloadJson,
+                                     EventPayload,
+                                     Event,
+                                     RegisterEvent,
+                                     QueryLatestParams,
+                                     QueryTimeseriesParams,
+                                     QueryServerParams,
+                                     QueryParams,
+                                     QueryResult)
+from hat.event.common.encoder import (timestamp_to_bytes,
+                                      timestamp_from_bytes,
+                                      timestamp_to_float,
+                                      timestamp_from_float,
+                                      timestamp_to_datetime,
+                                      timestamp_from_datetime,
+                                      timestamp_to_sbs,
+                                      timestamp_from_sbs,
+                                      status_to_sbs,
+                                      status_from_sbs,
+                                      event_to_sbs,
+                                      event_from_sbs,
+                                      register_event_to_sbs,
+                                      register_event_from_sbs,
+                                      query_params_to_sbs,
+                                      query_params_from_sbs,
+                                      query_result_to_sbs,
+                                      query_result_from_sbs,
+                                      event_payload_to_sbs,
+                                      event_payload_from_sbs)
+from hat.event.common.matches import matches_query_type
+from hat.event.common.module import (SourceType,
+                                     Source,
+                                     Engine,
+                                     Module,
+                                     ModuleConf,
+                                     CreateModule,
+                                     ModuleInfo,
+                                     import_module_info)
+from hat.event.common.subscription import (Subscription,
+                                           create_subscription)
+
+
+__all__ = ['BackendClosedError',
+           'Backend',
+           'BackendConf',
+           'BackendRegisteredEventsCb',
+           'BackendFlushedEventsCb',
+           'CreateBackend',
+           'BackendInfo',
+           'import_backend_info',
+           'EventTypeCollection',
+           'ListEventTypeCollection',
+           'TreeEventTypeCollection',
+           'json_schema_repo',
+           'sbs_repo',
+           'ServerId',
+           'SessionId',
+           'InstanceId',
+           'EventTypeSegment',
+           'EventType',
+           'Timestamp',
+           'min_timestamp',
+           'max_timestamp',
+           'Status',
+           'Order',
+           'OrderBy',
+           'EventId',
+           'EventPayloadBinary',
+           'EventPayloadJson',
+           'EventPayload',
+           'Event',
+           'RegisterEvent',
+           'QueryLatestParams',
+           'QueryTimeseriesParams',
+           'QueryServerParams',
+           'QueryParams',
+           'QueryResult',
+           'timestamp_to_bytes',
+           'timestamp_from_bytes',
+           'timestamp_to_float',
+           'timestamp_from_float',
+           'timestamp_to_datetime',
+           'timestamp_from_datetime',
+           'timestamp_to_sbs',
+           'timestamp_from_sbs',
+           'status_to_sbs',
+           'status_from_sbs',
+           'event_to_sbs',
+           'event_from_sbs',
+           'register_event_to_sbs',
+           'register_event_from_sbs',
+           'query_params_to_sbs',
+           'query_params_from_sbs',
+           'query_result_to_sbs',
+           'query_result_from_sbs',
+           'event_payload_to_sbs',
+           'event_payload_from_sbs',
+           'matches_query_type',
+           'SourceType',
+           'Source',
+           'Engine',
+           'Module',
+           'ModuleConf',
+           'CreateModule',
+           'ModuleInfo',
+           'import_module_info',
+           'Subscription',
+           'create_subscription',
+           'now']
+
+
+def now() -> Timestamp:
+    """Create new timestamp representing current time"""
+    return timestamp_from_float(time.time())
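
The package `__init__` mostly re-exports the public API; the only logic it adds is `now()`, defined at the end of the hunk above. A minimal usage sketch, assuming the wheel is installed so that `hat.event.common` is importable:

from hat.event import common

# now() wraps time.time() with timestamp_from_float
t = common.now()
print(t)                             # Timestamp(s=..., us=...)
print(common.timestamp_to_float(t))  # seconds since the epoch as float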

hat/event/common/backend.py
@@ -0,0 +1,91 @@
+from collections.abc import Collection
+import abc
+import importlib
+import typing
+
+from hat import aio
+from hat import json
+
+from hat.event.common.common import (EventId,
+                                     Event,
+                                     QueryParams,
+                                     QueryResult)
+
+
+class BackendClosedError(Exception):
+    """Backend closed"""
+
+
+class Backend(aio.Resource):
+    """Backend ABC"""
+
+    @abc.abstractmethod
+    async def get_last_event_id(self,
+                                server_id: int
+                                ) -> EventId:
+        """Get last registered event id associated with server id"""
+
+    @abc.abstractmethod
+    async def register(self,
+                       events: Collection[Event]
+                       ) -> Collection[Event] | None:
+        """Register events"""
+
+    @abc.abstractmethod
+    async def query(self,
+                    params: QueryParams
+                    ) -> QueryResult:
+        """Query events"""
+
+    @abc.abstractmethod
+    async def flush(self):
+        """Flush internal buffers and permanently persist events"""
+
+
+BackendConf: typing.TypeAlias = json.Data
+"""Backend configuration"""
+
+BackendRegisteredEventsCb: typing.TypeAlias = aio.AsyncCallable[
+    [Collection[Event]],
+    None]
+"""Backend registered events callback"""
+
+BackendFlushedEventsCb: typing.TypeAlias = aio.AsyncCallable[
+    [Collection[Event]],
+    None]
+"""Backend flushed events callback"""
+
+CreateBackend: typing.TypeAlias = aio.AsyncCallable[
+    [BackendConf,
+     BackendRegisteredEventsCb | None,
+     BackendFlushedEventsCb | None],
+    Backend]
+"""Create backend callable"""
+
+
+class BackendInfo(typing.NamedTuple):
+    """Backend info
+
+    Backend is implemented as python module which is dynamically imported.
+    It is expected that this module contains `info` which is instance of
+    `BackendInfo`.
+
+    If backend defines JSON schema repository and JSON schema id, JSON schema
+    repository will be used for additional validation of backend configuration
+    with JSON schema id.
+
+    """
+    create: CreateBackend
+    json_schema_id: str | None = None
+    json_schema_repo: json.SchemaRepository | None = None
+
+
+def import_backend_info(py_module_str: str) -> BackendInfo:
+    """Import backend info"""
+    py_module = importlib.import_module(py_module_str)
+    info = py_module.info
+
+    if not isinstance(info, BackendInfo):
+        raise Exception('invalid backend implementation')
+
+    return info
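
`import_backend_info` expects a backend to be an importable Python module exposing a module-level `info` of type `BackendInfo`. The sketch below is a hypothetical module (not part of this package) satisfying only the interface shown in this hunk; it assumes `aio.Group` from `hat.aio` and that `aio.Resource` requires an `async_group` property, which is not part of this diff:

# my_backend.py -- hypothetical module, loadable via
# common.import_backend_info('my_backend')
from hat import aio

from hat.event import common


class SketchBackend(common.Backend):
    """Keeps events in a plain list, for illustration only"""

    def __init__(self):
        self._async_group = aio.Group()
        self._events = []

    @property
    def async_group(self) -> aio.Group:
        return self._async_group

    async def get_last_event_id(self, server_id):
        return common.EventId(server=server_id, session=0, instance=0)

    async def register(self, events):
        self._events.extend(events)
        return events

    async def query(self, params):
        return common.QueryResult(events=list(self._events),
                                  more_follows=False)

    async def flush(self):
        pass


async def create(conf, registered_events_cb, flushed_events_cb):
    return SketchBackend()


info = common.BackendInfo(create=create)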

hat/event/common/collection/__init__.py
@@ -0,0 +1,8 @@
+from hat.event.common.collection.common import EventTypeCollection
+from hat.event.common.collection.list import ListEventTypeCollection
+from hat.event.common.collection.tree import TreeEventTypeCollection
+
+
+__all__ = ['EventTypeCollection',
+           'ListEventTypeCollection',
+           'TreeEventTypeCollection']

hat/event/common/collection/common.py
@@ -0,0 +1,28 @@
+from collections.abc import Hashable, Iterable
+import abc
+import typing
+
+from hat.event.common.common import EventType
+from hat.event.common.subscription import Subscription
+
+
+T = typing.TypeVar('T', bound=Hashable)
+
+
+class EventTypeCollection(abc.ABC, typing.Generic[T]):
+
+    @abc.abstractmethod
+    def __init__(self):
+        pass
+
+    @abc.abstractmethod
+    def add(self, subscription: Subscription, value: T):
+        pass
+
+    @abc.abstractmethod
+    def remove(self, value: T):
+        pass
+
+    @abc.abstractmethod
+    def get(self, event_type: EventType) -> Iterable[T]:
+        pass

hat/event/common/collection/list.py
@@ -0,0 +1,19 @@
+from hat.event.common.collection import common
+
+
+class ListEventTypeCollection(common.EventTypeCollection):
+
+    def __init__(self):
+        self._values = {}
+
+    def add(self, subscription, value):
+        self._values[value] = (self._values[value].union(subscription)
+                               if value in self._values else subscription)
+
+    def remove(self, value):
+        self._values.pop(value, None)
+
+    def get(self, event_type):
+        for value, subscription in self._values.items():
+            if subscription.matches(event_type):
+                yield value

hat/event/common/collection/tree.py
@@ -0,0 +1,62 @@
+import collections
+import typing
+
+from hat.event.common.collection import common
+
+
+class TreeEventTypeCollection(common.EventTypeCollection):
+
+    def __init__(self):
+        self._root = _create_node()
+        self._value_nodes = collections.defaultdict(collections.deque)
+
+    def add(self, subscription, value):
+        for query_type in subscription.get_query_types():
+            node = self._root
+            rest = query_type
+
+            while rest:
+                head, rest = rest[0], rest[1:]
+                if head == '*' and rest:
+                    raise ValueError('invalid query type')
+
+                node = node.children[head]
+
+            if value in node.values:
+                return
+
+            node.values.add(value)
+            self._value_nodes[value].append(node)
+
+    def remove(self, value):
+        for node in self._value_nodes.pop(value, []):
+            node.values.remove(value)
+
+    def get(self, event_type):
+        return set(_get(self._root, event_type))
+
+
+class _Node(typing.NamedTuple):
+    values: set
+    children: collections.defaultdict[str, '_Node']
+
+
+def _create_node():
+    return _Node(set(), collections.defaultdict(_create_node))
+
+
+def _get(node, event_type):
+    if '*' in node.children:
+        yield from node.children['*'].values
+
+    if not event_type:
+        yield from node.values
+        return
+
+    head, rest = event_type[0], event_type[1:]
+
+    if head in node.children:
+        yield from _get(node.children[head], rest)
+
+    if '?' in node.children:
+        yield from _get(node.children['?'], rest)
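
Both collection implementations map subscriptions to hashable values and return the values whose subscription matches a given event type; the tree variant indexes query types segment by segment instead of scanning every subscription. A usage sketch, assuming `create_subscription` (re-exported by `hat.event.common`) accepts an iterable of query-type tuples:

from hat.event import common

collection = common.TreeEventTypeCollection()
collection.add(common.create_subscription([('a', 'b', '*')]), 'handler-1')
collection.add(common.create_subscription([('a', '?')]), 'handler-2')

print(collection.get(('a', 'b')))       # {'handler-1', 'handler-2'}
print(collection.get(('a', 'b', 'c')))  # {'handler-1'}
print(collection.get(('x',)))           # set()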

hat/event/common/common.py
@@ -0,0 +1,176 @@
+from collections.abc import Collection
+import enum
+import importlib.resources
+import typing
+
+from hat import json
+from hat import sbs
+from hat import util
+
+
+with importlib.resources.as_file(importlib.resources.files(__package__) /
+                                 'json_schema_repo.json') as _path:
+    json_schema_repo: json.SchemaRepository = json.SchemaRepository(
+        json.json_schema_repo,
+        json.SchemaRepository.from_json(_path))
+    """JSON schema repository"""
+
+with importlib.resources.as_file(importlib.resources.files(__package__) /
+                                 'sbs_repo.json') as _path:
+    sbs_repo: sbs.Repository = sbs.Repository.from_json(_path)
+    """SBS schema repository"""
+
+ServerId: typing.TypeAlias = int
+"""Server identifier"""
+
+SessionId: typing.TypeAlias = int
+"""Session identifier"""
+
+InstanceId: typing.TypeAlias = int
+"""Event instance identifier"""
+
+EventTypeSegment: typing.TypeAlias = str
+"""Event type segment"""
+
+EventType: typing.TypeAlias = tuple[EventTypeSegment, ...]
+"""Event type"""
+
+
+class Timestamp(typing.NamedTuple):
+    s: int
+    """seconds since 1970-01-01 (can be negative)"""
+    us: int
+    """microseconds added to timestamp seconds in range [0, 1e6)"""
+
+    def add(self, s: float) -> 'Timestamp':
+        """Create new timestamp by adding seconds to existing timestamp"""
+        us = self.us + round((s - int(s)) * 1e6)
+        s = self.s + int(s)
+        return Timestamp(s=s + us // int(1e6),
+                         us=us % int(1e6))
+
+
+min_timestamp: Timestamp = Timestamp(s=-(1 << 63), us=0)
+"""Minimal byte serializable timestamp value"""
+
+max_timestamp: Timestamp = Timestamp(s=(1 << 63) - 1, us=999_999)
+"""Maximal byte serializable timestamp value"""
+
+
+class Status(enum.Enum):
+    STANDBY = 'standby'
+    STARTING = 'starting'
+    OPERATIONAL = 'operational'
+    STOPPING = 'stopping'
+
+
+class Order(enum.Enum):
+    DESCENDING = 'descending'
+    ASCENDING = 'ascending'
+
+
+class OrderBy(enum.Enum):
+    TIMESTAMP = 'timestamp'
+    SOURCE_TIMESTAMP = 'sourceTimestamp'
+
+
+class EventId(typing.NamedTuple):
+    server: ServerId
+    session: SessionId
+    instance: InstanceId
+
+
+class EventPayloadBinary(typing.NamedTuple):
+    type: str
+    data: util.Bytes
+
+
+class EventPayloadJson(typing.NamedTuple):
+    data: json.Data
+
+
+EventPayload: typing.TypeAlias = EventPayloadBinary | EventPayloadJson
+
+
+class Event(typing.NamedTuple):
+    """Event
+
+    Operators `>` and `<` test for natural order where it is assumed that
+    first operand is registered before second operand.
+
+    """
+
+    id: EventId
+    type: EventType
+    timestamp: Timestamp
+    source_timestamp: Timestamp | None
+    payload: EventPayload | None
+
+    def __lt__(self, other):
+        if not isinstance(other, Event):
+            return NotImplemented
+
+        if self.id == other.id:
+            return False
+
+        if self.id.server == other.id.server:
+            return self.id < other.id
+
+        if self.timestamp != other.timestamp:
+            return self.timestamp < other.timestamp
+
+        return True
+
+    def __gt__(self, other):
+        if not isinstance(other, Event):
+            return NotImplemented
+
+        if self.id == other.id:
+            return False
+
+        if self.id.server == other.id.server:
+            return self.id > other.id
+
+        if self.timestamp != other.timestamp:
+            return self.timestamp > other.timestamp
+
+        return False
+
+
+class RegisterEvent(typing.NamedTuple):
+    type: EventType
+    source_timestamp: Timestamp | None
+    payload: EventPayload | None
+
+
+class QueryLatestParams(typing.NamedTuple):
+    event_types: Collection[EventType] | None = None
+
+
+class QueryTimeseriesParams(typing.NamedTuple):
+    event_types: Collection[EventType] | None = None
+    t_from: Timestamp | None = None
+    t_to: Timestamp | None = None
+    source_t_from: Timestamp | None = None
+    source_t_to: Timestamp | None = None
+    order: Order = Order.DESCENDING
+    order_by: OrderBy = OrderBy.TIMESTAMP
+    max_results: int | None = None
+    last_event_id: EventId | None = None
+
+
+class QueryServerParams(typing.NamedTuple):
+    server_id: ServerId
+    persisted: bool = False
+    max_results: int | None = None
+    last_event_id: EventId | None = None
+
+
+QueryParams: typing.TypeAlias = (QueryLatestParams |
+                                 QueryTimeseriesParams |
+                                 QueryServerParams)
+
+
+class QueryResult(typing.NamedTuple):
+    events: Collection[Event]
+    more_follows: bool
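
The named tuples above are plain data containers; `Timestamp.add` is the only arithmetic helper, carrying overflow of the microsecond field into seconds. A short sketch of how these types compose, assuming the re-exports from `hat.event.common`:

from hat.event import common

t = common.Timestamp(s=1_700_000_000, us=250_000)
print(t.add(0.75))   # Timestamp(s=1700000001, us=0)
print(t.add(-1.5))   # Timestamp(s=1699999998, us=750000)

params = common.QueryTimeseriesParams(
    event_types=[('device', '?', 'status')],
    t_from=t,
    order=common.Order.ASCENDING,
    max_results=100)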