hat-event 0.9.27__cp310.cp311.cp312.cp313-abi3-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. hat/event/__init__.py +1 -0
  2. hat/event/adminer/__init__.py +18 -0
  3. hat/event/adminer/client.py +124 -0
  4. hat/event/adminer/common.py +27 -0
  5. hat/event/adminer/server.py +111 -0
  6. hat/event/backends/__init__.py +0 -0
  7. hat/event/backends/dummy.py +49 -0
  8. hat/event/backends/lmdb/__init__.py +9 -0
  9. hat/event/backends/lmdb/backend.py +319 -0
  10. hat/event/backends/lmdb/common.py +277 -0
  11. hat/event/backends/lmdb/conditions.py +102 -0
  12. hat/event/backends/lmdb/convert/__init__.py +0 -0
  13. hat/event/backends/lmdb/convert/__main__.py +8 -0
  14. hat/event/backends/lmdb/convert/convert_v06_to_v07.py +213 -0
  15. hat/event/backends/lmdb/convert/convert_v07_to_v09.py +175 -0
  16. hat/event/backends/lmdb/convert/main.py +88 -0
  17. hat/event/backends/lmdb/convert/v06.py +216 -0
  18. hat/event/backends/lmdb/convert/v07.py +508 -0
  19. hat/event/backends/lmdb/convert/v09.py +50 -0
  20. hat/event/backends/lmdb/convert/version.py +63 -0
  21. hat/event/backends/lmdb/environment.py +100 -0
  22. hat/event/backends/lmdb/latestdb.py +116 -0
  23. hat/event/backends/lmdb/manager/__init__.py +0 -0
  24. hat/event/backends/lmdb/manager/__main__.py +8 -0
  25. hat/event/backends/lmdb/manager/common.py +45 -0
  26. hat/event/backends/lmdb/manager/copy.py +92 -0
  27. hat/event/backends/lmdb/manager/main.py +34 -0
  28. hat/event/backends/lmdb/manager/query.py +215 -0
  29. hat/event/backends/lmdb/refdb.py +234 -0
  30. hat/event/backends/lmdb/systemdb.py +102 -0
  31. hat/event/backends/lmdb/timeseriesdb.py +486 -0
  32. hat/event/backends/memory.py +178 -0
  33. hat/event/common/__init__.py +144 -0
  34. hat/event/common/backend.py +91 -0
  35. hat/event/common/collection/__init__.py +8 -0
  36. hat/event/common/collection/common.py +28 -0
  37. hat/event/common/collection/list.py +19 -0
  38. hat/event/common/collection/tree.py +62 -0
  39. hat/event/common/common.py +176 -0
  40. hat/event/common/encoder.py +305 -0
  41. hat/event/common/json_schema_repo.json +1 -0
  42. hat/event/common/matches.py +44 -0
  43. hat/event/common/module.py +142 -0
  44. hat/event/common/sbs_repo.json +1 -0
  45. hat/event/common/subscription/__init__.py +22 -0
  46. hat/event/common/subscription/_csubscription.abi3.pyd +0 -0
  47. hat/event/common/subscription/common.py +145 -0
  48. hat/event/common/subscription/csubscription.py +47 -0
  49. hat/event/common/subscription/pysubscription.py +97 -0
  50. hat/event/component.py +284 -0
  51. hat/event/eventer/__init__.py +28 -0
  52. hat/event/eventer/client.py +260 -0
  53. hat/event/eventer/common.py +27 -0
  54. hat/event/eventer/server.py +286 -0
  55. hat/event/manager/__init__.py +0 -0
  56. hat/event/manager/__main__.py +8 -0
  57. hat/event/manager/common.py +48 -0
  58. hat/event/manager/main.py +387 -0
  59. hat/event/server/__init__.py +0 -0
  60. hat/event/server/__main__.py +8 -0
  61. hat/event/server/adminer_server.py +43 -0
  62. hat/event/server/engine.py +216 -0
  63. hat/event/server/engine_runner.py +127 -0
  64. hat/event/server/eventer_client.py +205 -0
  65. hat/event/server/eventer_client_runner.py +152 -0
  66. hat/event/server/eventer_server.py +119 -0
  67. hat/event/server/main.py +84 -0
  68. hat/event/server/main_runner.py +212 -0
  69. hat_event-0.9.27.dist-info/LICENSE +202 -0
  70. hat_event-0.9.27.dist-info/METADATA +108 -0
  71. hat_event-0.9.27.dist-info/RECORD +73 -0
  72. hat_event-0.9.27.dist-info/WHEEL +7 -0
  73. hat_event-0.9.27.dist-info/entry_points.txt +5 -0
@@ -0,0 +1,116 @@
1
+ import itertools
2
+ import typing
3
+
4
+ import lmdb
5
+
6
+ from hat.event.backends.lmdb import common
7
+ from hat.event.backends.lmdb import environment
8
+ from hat.event.backends.lmdb.conditions import Conditions
9
+
10
+
11
class Changes(typing.NamedTuple):
    """Accumulated in-memory modifications awaiting an ext_write flush."""
    # event type ref -> latest event stored under that ref
    data: dict[common.EventTypeRef, common.Event]
    # newly assigned event type refs (ref -> event type, persisted in
    # DbType.LATEST_TYPE); LatestDb.add() stores event.type here, so the
    # values are event types - the original EventId annotation was wrong
    types: dict[common.EventTypeRef, common.EventType]
14
+
15
+
16
class AddResult(typing.NamedTuple):
    """Result of LatestDb.add."""
    # reference under which the event was stored, or None if the event
    # was skipped (outside subscription or older than the stored event)
    added_ref: common.EventRef | None
    # (event id, ref) of the previously stored latest event that this
    # event replaced, or None if nothing was replaced
    removed_ref: tuple[common.EventId, common.EventRef] | None
19
+
20
+
21
def ext_create(env: environment.Environment,
               txn: lmdb.Transaction,
               conditions: Conditions,
               subscription: common.Subscription
               ) -> 'LatestDb':
    """Create a LatestDb instance, loading its state from the transaction.

    Only events matching both conditions and subscription, and still
    referenced by the type registry, are kept in memory.
    """
    db = LatestDb()
    db._env = env
    db._subscription = subscription
    db._changes = Changes({}, {})

    # inverse mapping of DbType.LATEST_TYPE: event type -> type ref
    db._event_type_refs = {}
    for ref, event_type in env.ext_read(txn, common.DbType.LATEST_TYPE):
        db._event_type_refs[event_type] = ref

    db._events = {}
    for ref, event in env.ext_read(txn, common.DbType.LATEST_DATA):
        if not conditions.matches(event):
            continue
        if not subscription.matches(event.type):
            continue
        # drop stale entries whose type ref no longer points at them
        if db._event_type_refs.get(event.type) != ref:
            continue
        db._events[event.type] = event

    # type refs continue counting after the largest persisted ref
    highest_ref = max(db._event_type_refs.values(), default=0)
    db._next_event_type_refs = itertools.count(highest_ref + 1)

    return db
46
+
47
+
48
class LatestDb:
    """In-memory cache of the latest event per event type, backed by LMDB.

    Instances are created with ext_create; modifications accumulate in a
    Changes structure (create_changes) and are persisted with ext_write.
    """

    def add(self,
            event: common.Event
            ) -> AddResult:
        """Register event as the latest event of its type.

        Events outside the subscription, or not newer than the currently
        stored event of the same type, are ignored.
        """
        if not self._subscription.matches(event.type):
            return AddResult(added_ref=None,
                             removed_ref=None)

        previous_event = self._events.get(event.type)
        # Event comparison is tuple-wise (NamedTuple); a stored event that
        # compares greater means the new event is stale
        if previous_event and previous_event > event:
            return AddResult(added_ref=None,
                             removed_ref=None)

        event_type_ref = self._event_type_refs.get(event.type)
        if event_type_ref is None:
            # first event of this type - allocate and record a new type ref
            event_type_ref = next(self._next_event_type_refs)
            self._changes.types[event_type_ref] = event.type
            self._event_type_refs[event.type] = event_type_ref

        self._changes.data[event_type_ref] = event
        self._events[event.type] = event

        added_ref = common.LatestEventRef(event_type_ref)
        # the replaced event (if any) was stored under the same type ref
        removed_ref = ((previous_event.id, added_ref)
                       if previous_event else None)
        return AddResult(added_ref=added_ref,
                         removed_ref=removed_ref)

    def query(self,
              params: common.QueryLatestParams
              ) -> common.QueryResult:
        """Query cached latest events by the given event types.

        Supports '*' / '?' subscription-style patterns; None or ('*',)
        selects all cached events. more_follows is always False.
        """
        event_types = (set(params.event_types)
                       if params.event_types is not None
                       else None)

        if event_types is None or ('*', ) in event_types:
            events = self._events.values()

        elif any('*' in event_type or '?' in event_type
                 for event_type in event_types):
            # pattern types present - fall back to subscription matching
            subscription = common.create_subscription(event_types)
            events = (event for event in self._events.values()
                      if subscription.matches(event.type))

        elif len(event_types) < len(self._events):
            # few exact types - direct lookups are cheaper than scanning
            events = (self._events.get(event_type)
                      for event_type in event_types)
            events = (event for event in events if event)

        else:
            events = (event for event in self._events.values()
                      if event.type in event_types)

        return common.QueryResult(events=list(events),
                                  more_follows=False)

    def create_changes(self) -> Changes:
        """Return accumulated changes and reset the accumulator."""
        changes, self._changes = self._changes, Changes({}, {})
        return changes

    def ext_write(self,
                  txn: lmdb.Transaction,
                  changes: Changes):
        """Persist previously collected changes within the transaction."""
        self._env.ext_write(txn, common.DbType.LATEST_DATA,
                            changes.data.items())

        self._env.ext_write(txn, common.DbType.LATEST_TYPE,
                            changes.types.items())
File without changes
@@ -0,0 +1,8 @@
1
import sys

from hat.event.backends.lmdb.manager.main import main


if __name__ == '__main__':
    # present a stable program name in argparse usage/error output
    sys.argv[0] = 'hat-event-lmdb-manager'
    sys.exit(main())
@@ -0,0 +1,45 @@
1
+ from hat.event.backends.lmdb.common import * # NOQA
2
+
3
+ from hat import json
4
+
5
+ from hat.event.backends.lmdb.common import (EventId,
6
+ Timestamp,
7
+ Event,
8
+ EventPayloadJson,
9
+ EventPayloadBinary)
10
+
11
+
12
def event_id_to_json(event_id: EventId) -> json.Data:
    """Serialize an event identifier to JSON-compatible data."""
    return dict(server=event_id.server,
                session=event_id.session,
                instance=event_id.instance)
16
+
17
+
18
def timestamp_to_json(timestamp: Timestamp) -> json.Data:
    """Serialize a timestamp (seconds + microseconds) to JSON data."""
    return dict(s=timestamp.s,
                us=timestamp.us)
21
+
22
+
23
def event_to_json(event: Event) -> json.Data:
    """Serialize an event to JSON-compatible data."""
    source_timestamp = (timestamp_to_json(event.source_timestamp)
                        if event.source_timestamp else None)
    return {'id': event_id_to_json(event.id),
            'type': list(event.type),
            'timestamp': timestamp_to_json(event.timestamp),
            'source_timestamp': source_timestamp,
            'payload': _event_payload_to_json(event.payload)}
30
+
31
+
32
+ def _event_payload_to_json(payload):
33
+ if payload is None:
34
+ return None
35
+
36
+ if isinstance(payload, EventPayloadBinary):
37
+ return {'type': 'BINARY',
38
+ 'subtype': payload.type,
39
+ 'data': bytes(payload.data).hex()}
40
+
41
+ if isinstance(payload, EventPayloadJson):
42
+ return {'type': 'JSON',
43
+ 'data': payload.data}
44
+
45
+ raise ValueError('unsupported payload type')
@@ -0,0 +1,92 @@
1
+ from pathlib import Path
2
+ import argparse
3
+
4
+ from hat.event.backends.lmdb.manager import common
5
+
6
+
7
def create_argument_parser(subparsers) -> argparse.ArgumentParser:
    """Register the 'copy' subcommand and its arguments."""
    parser = subparsers.add_parser('copy')
    for flag in ('--skip-latest', '--skip-timeseries'):
        parser.add_argument(flag, action='store_true')
    parser.add_argument('src_path', type=Path)
    parser.add_argument('dst_path', type=Path)
    return parser
14
+
15
+
16
def copy(args):
    """Copy the content of one LMDB event database into a new one.

    args come from the 'copy' subcommand parser: src_path, dst_path,
    skip_latest and skip_timeseries. Raises if dst_path already exists.
    """
    if args.dst_path.exists():
        raise Exception('destination db already exists')

    with common.ext_create_env(args.src_path, readonly=True) as src_env:
        with common.ext_create_env(args.dst_path) as dst_env:
            # open every database in both environments up front
            src_dbs = {db_type: common.ext_open_db(src_env, db_type, False)
                       for db_type in common.DbType}
            dst_dbs = {db_type: common.ext_open_db(dst_env, db_type, True)
                       for db_type in common.DbType}

            # single read txn + single write txn keeps the copy consistent
            with src_env.begin(buffers=True) as src_txn:
                with dst_env.begin(write=True) as dst_txn:
                    _copy_system(src_dbs, src_txn, dst_dbs, dst_txn)
                    # REF entries are filtered according to the skip flags
                    _copy_ref(src_dbs, src_txn, dst_dbs, dst_txn,
                              args.skip_latest, args.skip_timeseries)

                    if not args.skip_latest:
                        _copy_latest(src_dbs, src_txn, dst_dbs, dst_txn)

                    if not args.skip_timeseries:
                        _copy_timeseries(src_dbs, src_txn, dst_dbs, dst_txn)
38
+
39
+
40
def _copy_system(src_dbs, src_txn, dst_dbs, dst_txn):
    """Copy system databases (settings, last event id, last timestamp)."""
    system_db_types = (common.DbType.SYSTEM_SETTINGS,
                       common.DbType.SYSTEM_LAST_EVENT_ID,
                       common.DbType.SYSTEM_LAST_TIMESTAMP)
    for db_type in system_db_types:
        _copy_db(src_dbs[db_type], src_txn, dst_dbs[db_type], dst_txn)
45
+
46
+
47
def _copy_ref(src_dbs, src_txn, dst_dbs, dst_txn, skip_latest,
              skip_timeseries):
    """Copy the REF database, optionally dropping latest/timeseries refs.

    Entries whose every reference is filtered out are not written at all.
    """
    db_type = common.DbType.REF
    db_def = common.db_defs[db_type]
    src_db = src_dbs[db_type]
    dst_db = dst_dbs[db_type]

    # nothing to filter - copy raw values without decode/encode round trip
    copy_verbatim = not (skip_latest or skip_timeseries)

    for key, value in src_txn.cursor(db=src_db):
        if copy_verbatim:
            dst_txn.put(key, value, db=dst_db)
            continue

        refs = db_def.decode_value(value)

        if skip_latest:
            refs = (ref for ref in refs
                    if not isinstance(ref, common.LatestEventRef))

        if skip_timeseries:
            refs = (ref for ref in refs
                    if not isinstance(ref, common.TimeseriesEventRef))

        refs = set(refs)
        if not refs:
            continue

        dst_txn.put(key, db_def.encode_value(refs), db=dst_db)
75
+
76
+
77
def _copy_latest(src_dbs, src_txn, dst_dbs, dst_txn):
    """Copy latest event databases verbatim."""
    for db_type in (common.DbType.LATEST_DATA,
                    common.DbType.LATEST_TYPE):
        _copy_db(src_dbs[db_type], src_txn, dst_dbs[db_type], dst_txn)
81
+
82
+
83
def _copy_timeseries(src_dbs, src_txn, dst_dbs, dst_txn):
    """Copy timeseries databases verbatim."""
    for db_type in (common.DbType.TIMESERIES_DATA,
                    common.DbType.TIMESERIES_PARTITION,
                    common.DbType.TIMESERIES_COUNT):
        _copy_db(src_dbs[db_type], src_txn, dst_dbs[db_type], dst_txn)
88
+
89
+
90
+ def _copy_db(src_db, src_txn, dst_db, dst_txn):
91
+ for key, value in src_txn.cursor(db=src_db):
92
+ dst_txn.put(key, value, db=dst_db)
@@ -0,0 +1,34 @@
1
+ import argparse
2
+ import sys
3
+
4
+ from hat.event.backends.lmdb.manager import copy
5
+ from hat.event.backends.lmdb.manager import query
6
+
7
+
8
def create_argument_parser() -> argparse.ArgumentParser:
    """Build the top-level parser with 'query' and 'copy' subcommands."""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(dest='action', required=True)

    # each subcommand module registers its own subparser
    for module in (query, copy):
        module.create_argument_parser(subparsers)

    return parser
16
+
17
+
18
def main():
    """Parse command line arguments and dispatch to the selected action."""
    args = create_argument_parser().parse_args()

    actions = {'query': query.query,
               'copy': copy.copy}
    action = actions.get(args.action)
    if action is None:
        raise ValueError('unsupported action')

    action(args)
30
+
31
+
32
if __name__ == '__main__':
    # present a stable program name in argparse usage/error output
    sys.argv[0] = 'hat-event-lmdb-manager'
    sys.exit(main())
@@ -0,0 +1,215 @@
1
+ from pathlib import Path
2
+ import argparse
3
+
4
+ from hat import json
5
+
6
+ from hat.event.backends.lmdb.manager import common
7
+
8
+
9
def create_argument_parser(subparsers) -> argparse.ArgumentParser:
    """Register the 'query' subcommand and its sub-subcommands."""
    parser = subparsers.add_parser('query')
    parser.add_argument('--db', metavar='PATH', type=Path, required=True)
    subsubparsers = parser.add_subparsers(dest='subaction', required=True)

    subsubparsers.add_parser('settings')

    # these subactions share a single optional server id filter
    for name in ('last_event_id', 'last_timestamp', 'ref'):
        sub = subsubparsers.add_parser(name)
        sub.add_argument('--server-id', type=int, default=None)

    latest_parser = subsubparsers.add_parser('latest')
    latest_parser.add_argument('--event-types', type=str, default=None,
                               nargs='*')

    partition_parser = subsubparsers.add_parser('partition')
    partition_parser.add_argument('--partition-id', type=int, default=None)

    timeseries_parser = subsubparsers.add_parser('timeseries')
    timeseries_parser.add_argument('--partition-id', type=int, default=None)
    timeseries_parser.add_argument('--server-id', type=int, default=None)
    timeseries_parser.add_argument('--event-types', type=str, default=None,
                                   nargs='*')

    return parser
39
+
40
+
41
def query(args):
    """Execute the 'query' subcommand: print selected db content as JSON lines.

    args come from the 'query' subcommand parser: db path, subaction and
    subaction-specific filters. Raises ValueError on unknown subaction.
    """
    with common.ext_create_env(args.db, readonly=True) as env:
        dbs = {db_type: common.ext_open_db(env, db_type, False)
               for db_type in common.DbType}

        with env.begin(buffers=True) as txn:
            if args.subaction == 'settings':
                results = _query_settings(dbs, txn)

            elif args.subaction == 'last_event_id':
                results = _query_last_event_id(dbs, txn, args.server_id)

            elif args.subaction == 'last_timestamp':
                results = _query_last_timestamp(dbs, txn, args.server_id)

            elif args.subaction == 'ref':
                # bugfix: the --server-id filter was not forwarded
                results = _query_ref(dbs, txn, args.server_id)

            elif args.subaction == 'latest':
                event_types = _parse_event_types(args.event_types)
                # bugfix: dispatched to _query_ref instead of _query_latest
                results = _query_latest(dbs, txn, event_types)

            elif args.subaction == 'partition':
                results = _query_partition(dbs, txn, args.partition_id)

            elif args.subaction == 'timeseries':
                event_types = _parse_event_types(args.event_types)
                results = _query_timeseries(dbs, txn, args.partition_id,
                                            args.server_id, event_types)

            else:
                raise ValueError('unsupported subaction')

            # results are generators - consume them while txn is open
            for result in results:
                _print_result(result)


def _parse_event_types(event_types):
    """Split 'a/b/c' strings into event type tuples; None passes through."""
    if event_types is None:
        return None

    return [tuple(event_type.split('/')) for event_type in event_types]
82
+
83
+
84
def _print_result(result):
    # one JSON document per line (JSON lines output)
    print(json.encode(result))
86
+
87
+
88
def _query_settings(dbs, txn):
    """Yield all system settings entries as JSON data."""
    db_type = common.DbType.SYSTEM_SETTINGS
    db_def = common.db_defs[db_type]

    for key, value in txn.cursor(db=dbs[db_type]):
        yield {'settings_id': db_def.decode_key(key).name,
               'data': db_def.decode_value(value)}
99
+
100
+
101
def _query_last_event_id(dbs, txn, server_id):
    """Yield last event ids, optionally filtered by server id."""
    db_type = common.DbType.SYSTEM_LAST_EVENT_ID
    db_def = common.db_defs[db_type]

    for key, value in txn.cursor(db=dbs[db_type]):
        if server_id is not None and db_def.decode_key(key) != server_id:
            continue

        yield common.event_id_to_json(db_def.decode_value(value))
113
+
114
+
115
def _query_last_timestamp(dbs, txn, server_id):
    """Yield last timestamps, optionally filtered by server id."""
    db_type = common.DbType.SYSTEM_LAST_TIMESTAMP
    db_def = common.db_defs[db_type]

    for key, value in txn.cursor(db=dbs[db_type]):
        if server_id is not None and db_def.decode_key(key) != server_id:
            continue

        yield common.timestamp_to_json(db_def.decode_value(value))
127
+
128
+
129
def _query_ref(dbs, txn, server_id):
    """Yield event reference registry entries, optionally filtered by server.

    Each yielded item describes a single latest or timeseries reference of
    an event; an event with both kinds of reference yields two items.
    """
    ref_db_type = common.DbType.REF
    ref_db = dbs[ref_db_type]
    ref_db_def = common.db_defs[ref_db_type]

    latest_type_db_type = common.DbType.LATEST_TYPE
    latest_type_db = dbs[latest_type_db_type]
    latest_type_db_def = common.db_defs[latest_type_db_type]

    for key, value in txn.cursor(db=ref_db):
        event_id = ref_db_def.decode_key(key)
        if server_id is not None and event_id.server != server_id:
            continue

        event_refs = ref_db_def.decode_value(value)

        for event_ref in event_refs:
            if isinstance(event_ref, common.LatestEventRef):
                # bugfix: db defs expose encode_key, not encoded_key
                latest_type_key = latest_type_db_def.encode_key(event_ref.key)
                latest_type_value = txn.get(latest_type_key, db=latest_type_db)

                event_type = latest_type_db_def.decode_value(latest_type_value)
                yield {'event_id': common.event_id_to_json(event_id),
                       'ref_type': 'latest',
                       'event_type': list(event_type)}

            if isinstance(event_ref, common.TimeseriesEventRef):
                partition_id, timestamp, _ = event_ref.key
                yield {'event_id': common.event_id_to_json(event_id),
                       'ref_type': 'timeseries',
                       'partition_id': partition_id,
                       'timestamp': common.timestamp_to_json(timestamp)}
161
+
162
+
163
def _query_latest(dbs, txn, event_types):
    """Yield latest events, optionally filtered by event type patterns."""
    db_type = common.DbType.LATEST_DATA
    db = dbs[db_type]
    db_def = common.db_defs[db_type]

    # None means "match everything"
    subscription = common.create_subscription([('*', )] if event_types is None
                                              else event_types)

    for _, value in txn.cursor(db=db):
        event = db_def.decode_value(value)
        if not subscription.matches(event.type):
            continue

        # bugfix: serialize the decoded event, not the raw db buffer
        yield common.event_to_json(event)
177
+
178
+
179
def _query_partition(dbs, txn, partition_id):
    """Yield timeseries partition definitions, optionally one partition."""
    db_type = common.DbType.TIMESERIES_PARTITION
    db_def = common.db_defs[db_type]

    for key, value in txn.cursor(db=dbs[db_type]):
        decoded_partition_id = db_def.decode_key(key)
        if partition_id is not None and decoded_partition_id != partition_id:
            continue

        yield {'partition_id': decoded_partition_id,
               'data': db_def.decode_value(value)}
192
+
193
+
194
def _query_timeseries(dbs, txn, partition_id, server_id, event_types):
    """Yield timeseries events filtered by partition, server and event types."""
    db_type = common.DbType.TIMESERIES_DATA
    db = dbs[db_type]
    db_def = common.db_defs[db_type]

    # None means "match everything"
    subscription = common.create_subscription([('*', )] if event_types is None
                                              else event_types)

    for key, value in txn.cursor(db=db):
        # bugfix: db defs expose decode_key, not decode
        decoded_partition_id, _, event_id = db_def.decode_key(key)

        if partition_id is not None and partition_id != decoded_partition_id:
            continue

        if server_id is not None and server_id != event_id.server:
            continue

        event = db_def.decode_value(value)
        if not subscription.matches(event.type):
            continue

        # bugfix: serialize the decoded event, not the raw db buffer
        yield common.event_to_json(event)