aprsd 1.0.0-py3-none-any.whl → 3.4.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aprsd/__init__.py +6 -4
- aprsd/cli_helper.py +151 -0
- aprsd/client/__init__.py +13 -0
- aprsd/client/aprsis.py +132 -0
- aprsd/client/base.py +105 -0
- aprsd/client/drivers/__init__.py +0 -0
- aprsd/client/drivers/aprsis.py +224 -0
- aprsd/client/drivers/fake.py +73 -0
- aprsd/client/drivers/kiss.py +119 -0
- aprsd/client/factory.py +88 -0
- aprsd/client/fake.py +48 -0
- aprsd/client/kiss.py +103 -0
- aprsd/client/stats.py +38 -0
- aprsd/cmds/__init__.py +0 -0
- aprsd/cmds/completion.py +22 -0
- aprsd/cmds/dev.py +162 -0
- aprsd/cmds/fetch_stats.py +156 -0
- aprsd/cmds/healthcheck.py +86 -0
- aprsd/cmds/list_plugins.py +319 -0
- aprsd/cmds/listen.py +230 -0
- aprsd/cmds/send_message.py +174 -0
- aprsd/cmds/server.py +142 -0
- aprsd/cmds/webchat.py +681 -0
- aprsd/conf/__init__.py +56 -0
- aprsd/conf/client.py +131 -0
- aprsd/conf/common.py +302 -0
- aprsd/conf/log.py +65 -0
- aprsd/conf/opts.py +80 -0
- aprsd/conf/plugin_common.py +191 -0
- aprsd/conf/plugin_email.py +105 -0
- aprsd/exception.py +13 -0
- aprsd/log/__init__.py +0 -0
- aprsd/log/log.py +138 -0
- aprsd/main.py +104 -867
- aprsd/messaging.py +4 -0
- aprsd/packets/__init__.py +12 -0
- aprsd/packets/collector.py +56 -0
- aprsd/packets/core.py +823 -0
- aprsd/packets/log.py +143 -0
- aprsd/packets/packet_list.py +116 -0
- aprsd/packets/seen_list.py +54 -0
- aprsd/packets/tracker.py +109 -0
- aprsd/packets/watch_list.py +122 -0
- aprsd/plugin.py +475 -284
- aprsd/plugin_utils.py +86 -0
- aprsd/plugins/__init__.py +0 -0
- aprsd/plugins/email.py +709 -0
- aprsd/plugins/fortune.py +61 -0
- aprsd/plugins/location.py +179 -0
- aprsd/plugins/notify.py +61 -0
- aprsd/plugins/ping.py +31 -0
- aprsd/plugins/time.py +115 -0
- aprsd/plugins/version.py +31 -0
- aprsd/plugins/weather.py +405 -0
- aprsd/stats/__init__.py +20 -0
- aprsd/stats/app.py +49 -0
- aprsd/stats/collector.py +38 -0
- aprsd/threads/__init__.py +11 -0
- aprsd/threads/aprsd.py +119 -0
- aprsd/threads/keep_alive.py +124 -0
- aprsd/threads/log_monitor.py +121 -0
- aprsd/threads/registry.py +56 -0
- aprsd/threads/rx.py +354 -0
- aprsd/threads/stats.py +44 -0
- aprsd/threads/tx.py +255 -0
- aprsd/utils/__init__.py +163 -0
- aprsd/utils/counter.py +51 -0
- aprsd/utils/json.py +80 -0
- aprsd/utils/objectstore.py +123 -0
- aprsd/utils/ring_buffer.py +40 -0
- aprsd/utils/trace.py +180 -0
- aprsd/web/__init__.py +0 -0
- aprsd/web/admin/__init__.py +0 -0
- aprsd/web/admin/static/css/index.css +84 -0
- aprsd/web/admin/static/css/prism.css +4 -0
- aprsd/web/admin/static/css/tabs.css +35 -0
- aprsd/web/admin/static/images/Untitled.png +0 -0
- aprsd/web/admin/static/images/aprs-symbols-16-0.png +0 -0
- aprsd/web/admin/static/images/aprs-symbols-16-1.png +0 -0
- aprsd/web/admin/static/images/aprs-symbols-64-0.png +0 -0
- aprsd/web/admin/static/images/aprs-symbols-64-1.png +0 -0
- aprsd/web/admin/static/images/aprs-symbols-64-2.png +0 -0
- aprsd/web/admin/static/js/charts.js +235 -0
- aprsd/web/admin/static/js/echarts.js +465 -0
- aprsd/web/admin/static/js/logs.js +26 -0
- aprsd/web/admin/static/js/main.js +231 -0
- aprsd/web/admin/static/js/prism.js +12 -0
- aprsd/web/admin/static/js/send-message.js +114 -0
- aprsd/web/admin/static/js/tabs.js +28 -0
- aprsd/web/admin/templates/index.html +196 -0
- aprsd/web/chat/static/css/chat.css +115 -0
- aprsd/web/chat/static/css/index.css +66 -0
- aprsd/web/chat/static/css/style.css.map +1 -0
- aprsd/web/chat/static/css/tabs.css +41 -0
- aprsd/web/chat/static/css/upstream/bootstrap.min.css +6 -0
- aprsd/web/chat/static/css/upstream/font.woff2 +0 -0
- aprsd/web/chat/static/css/upstream/google-fonts.css +23 -0
- aprsd/web/chat/static/css/upstream/jquery-ui.css +1311 -0
- aprsd/web/chat/static/css/upstream/jquery.toast.css +28 -0
- aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/LatoLatin-Bold.woff +0 -0
- aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/LatoLatin-Bold.woff2 +0 -0
- aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/LatoLatin-Regular.woff +0 -0
- aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/LatoLatin-Regular.woff2 +0 -0
- aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/icons.woff +0 -0
- aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/icons.woff2 +0 -0
- aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/outline-icons.woff +0 -0
- aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/outline-icons.woff2 +0 -0
- aprsd/web/chat/static/images/Untitled.png +0 -0
- aprsd/web/chat/static/images/aprs-symbols-16-0.png +0 -0
- aprsd/web/chat/static/images/aprs-symbols-16-1.png +0 -0
- aprsd/web/chat/static/images/aprs-symbols-64-0.png +0 -0
- aprsd/web/chat/static/images/aprs-symbols-64-1.png +0 -0
- aprsd/web/chat/static/images/aprs-symbols-64-2.png +0 -0
- aprsd/web/chat/static/images/globe.svg +3 -0
- aprsd/web/chat/static/js/gps.js +84 -0
- aprsd/web/chat/static/js/main.js +45 -0
- aprsd/web/chat/static/js/send-message.js +585 -0
- aprsd/web/chat/static/js/tabs.js +28 -0
- aprsd/web/chat/static/js/upstream/bootstrap.bundle.min.js +7 -0
- aprsd/web/chat/static/js/upstream/jquery-3.7.1.min.js +2 -0
- aprsd/web/chat/static/js/upstream/jquery-ui.min.js +13 -0
- aprsd/web/chat/static/js/upstream/jquery.toast.js +374 -0
- aprsd/web/chat/static/js/upstream/semantic.min.js +11 -0
- aprsd/web/chat/static/js/upstream/socket.io.min.js +7 -0
- aprsd/web/chat/templates/index.html +139 -0
- aprsd/wsgi.py +315 -0
- aprsd-3.4.1.dist-info/AUTHORS +13 -0
- aprsd-3.4.1.dist-info/LICENSE +175 -0
- aprsd-3.4.1.dist-info/METADATA +799 -0
- aprsd-3.4.1.dist-info/RECORD +134 -0
- {aprsd-1.0.0.dist-info → aprsd-3.4.1.dist-info}/WHEEL +1 -1
- aprsd-3.4.1.dist-info/entry_points.txt +8 -0
- aprsd/fake_aprs.py +0 -83
- aprsd/utils.py +0 -166
- aprsd-1.0.0.dist-info/AUTHORS +0 -6
- aprsd-1.0.0.dist-info/METADATA +0 -181
- aprsd-1.0.0.dist-info/RECORD +0 -13
- aprsd-1.0.0.dist-info/entry_points.txt +0 -4
- aprsd-1.0.0.dist-info/pbr.json +0 -1
- /aprsd/{fuzzyclock.py → utils/fuzzyclock.py} +0 -0
- {aprsd-1.0.0.dist-info → aprsd-3.4.1.dist-info}/top_level.txt +0 -0
aprsd/threads/tx.py
ADDED
@@ -0,0 +1,255 @@
import logging
import threading
import time

from oslo_config import cfg
from rush import quota, throttle
from rush.contrib import decorator
from rush.limiters import periodic
from rush.stores import dictionary
import wrapt

from aprsd import conf  # noqa
from aprsd import threads as aprsd_threads
from aprsd.client import client_factory
from aprsd.packets import collector, core
from aprsd.packets import log as packet_log
from aprsd.packets import tracker


CONF = cfg.CONF
LOG = logging.getLogger("APRSD")

msg_t = throttle.Throttle(
    limiter=periodic.PeriodicLimiter(
        store=dictionary.DictionaryStore(),
    ),
    rate=quota.Quota.per_second(
        count=CONF.msg_rate_limit_period,
    ),
)
ack_t = throttle.Throttle(
    limiter=periodic.PeriodicLimiter(
        store=dictionary.DictionaryStore(),
    ),
    rate=quota.Quota.per_second(
        count=CONF.ack_rate_limit_period,
    ),
)

msg_throttle_decorator = decorator.ThrottleDecorator(throttle=msg_t)
ack_throttle_decorator = decorator.ThrottleDecorator(throttle=ack_t)
s_lock = threading.Lock()


@wrapt.synchronized(s_lock)
@msg_throttle_decorator.sleep_and_retry
def send(packet: core.Packet, direct=False, aprs_client=None):
    """Send a packet either in a thread or directly to the client."""
    # prepare the packet for sending.
    # This constructs the packet.raw
    packet.prepare()
    # Have to call the collector to track the packet
    # After prepare, as prepare assigns the msgNo
    collector.PacketCollector().tx(packet)
    if isinstance(packet, core.AckPacket):
        _send_ack(packet, direct=direct, aprs_client=aprs_client)
    else:
        _send_packet(packet, direct=direct, aprs_client=aprs_client)


@msg_throttle_decorator.sleep_and_retry
def _send_packet(packet: core.Packet, direct=False, aprs_client=None):
    if not direct:
        thread = SendPacketThread(packet=packet)
        thread.start()
    else:
        _send_direct(packet, aprs_client=aprs_client)


@ack_throttle_decorator.sleep_and_retry
def _send_ack(packet: core.AckPacket, direct=False, aprs_client=None):
    if not direct:
        thread = SendAckThread(packet=packet)
        thread.start()
    else:
        _send_direct(packet, aprs_client=aprs_client)


def _send_direct(packet, aprs_client=None):
    if aprs_client:
        cl = aprs_client
    else:
        cl = client_factory.create()

    packet.update_timestamp()
    packet_log.log(packet, tx=True)
    try:
        cl.send(packet)
    except Exception as e:
        LOG.error(f"Failed to send packet: {packet}")
        LOG.error(e)


class SendPacketThread(aprsd_threads.APRSDThread):
    loop_count: int = 1

    def __init__(self, packet):
        self.packet = packet
        super().__init__(f"TX-{packet.to_call}-{self.packet.msgNo}")

    def loop(self):
        """Loop until a message is acked or it gets delayed.

        We only sleep for 5 seconds between each loop run, so
        that CTRL-C can exit the app in a short period.  Each sleep
        means the app quitting is blocked until sleep is done.
        So we keep track of the last send attempt and only send if the
        last send attempt is old enough.

        """
        pkt_tracker = tracker.PacketTrack()
        # lets see if the message is still in the tracking queue
        packet = pkt_tracker.get(self.packet.msgNo)
        if not packet:
            # The message has been removed from the tracking queue
            # So it got acked and we are done.
            LOG.info(
                f"{self.packet.__class__.__name__}"
                f"({self.packet.msgNo}) "
                "Message Send Complete via Ack.",
            )
            return False
        else:
            send_now = False
            if packet.send_count >= packet.retry_count:
                # we reached the send limit, don't send again
                # TODO(hemna) - Need to put this in a delayed queue?
                LOG.info(
                    f"{packet.__class__.__name__} "
                    f"({packet.msgNo}) "
                    "Message Send Complete. Max attempts reached"
                    f" {packet.retry_count}",
                )
                pkt_tracker.remove(packet.msgNo)
                return False

            # Message is still outstanding and needs to be acked.
            if packet.last_send_time:
                # Message has a last send time tracking
                now = int(round(time.time()))
                sleeptime = (packet.send_count + 1) * 31
                delta = now - packet.last_send_time
                if delta > sleeptime:
                    # It's time to try to send it again
                    send_now = True
            else:
                send_now = True

            if send_now:
                # no attempt time, so lets send it, and start
                # tracking the time.
                packet.last_send_time = int(round(time.time()))
                _send_direct(packet)
                packet.send_count += 1

            time.sleep(1)
            # Make sure we get called again.
            self.loop_count += 1
            return True


class SendAckThread(aprsd_threads.APRSDThread):
    loop_count: int = 1
    max_retries = 3

    def __init__(self, packet):
        self.packet = packet
        super().__init__(f"TXAck-{packet.to_call}-{self.packet.msgNo}")
        self.max_retries = CONF.default_ack_send_count

    def loop(self):
        """Separate thread to send acks with retries."""
        send_now = False
        if self.packet.send_count == self.max_retries:
            # we reached the send limit, don't send again
            # TODO(hemna) - Need to put this in a delayed queue?
            LOG.debug(
                f"{self.packet.__class__.__name__}"
                f"({self.packet.msgNo}) "
                "Send Complete. Max attempts reached"
                f" {self.max_retries}",
            )
            return False

        if self.packet.last_send_time:
            # Message has a last send time tracking
            now = int(round(time.time()))

            # aprs duplicate detection is 30 secs?
            # (21 only sends first, 28 skips middle)
            sleep_time = 31
            delta = now - self.packet.last_send_time
            if delta > sleep_time:
                # It's time to try to send it again
                send_now = True
            elif self.loop_count % 10 == 0:
                LOG.debug(f"Still wating. {delta}")
        else:
            send_now = True

        if send_now:
            _send_direct(self.packet)
            self.packet.send_count += 1
            self.packet.last_send_time = int(round(time.time()))

        time.sleep(1)
        self.loop_count += 1
        return True


class BeaconSendThread(aprsd_threads.APRSDThread):
    """Thread that sends a GPS beacon packet periodically.

    Settings are in the [DEFAULT] section of the config file.
    """
    _loop_cnt: int = 1

    def __init__(self):
        super().__init__("BeaconSendThread")
        self._loop_cnt = 1
        # Make sure Latitude and Longitude are set.
        if not CONF.latitude or not CONF.longitude:
            LOG.error(
                "Latitude and Longitude are not set in the config file."
                "Beacon will not be sent and thread is STOPPED.",
            )
            self.stop()
        LOG.info(
            "Beacon thread is running and will send "
            f"beacons every {CONF.beacon_interval} seconds.",
        )

    def loop(self):
        # Only dump out the stats every N seconds
        if self._loop_cnt % CONF.beacon_interval == 0:
            pkt = core.BeaconPacket(
                from_call=CONF.callsign,
                to_call="APRS",
                latitude=float(CONF.latitude),
                longitude=float(CONF.longitude),
                comment="APRSD GPS Beacon",
                symbol=CONF.beacon_symbol,
            )
            try:
                # Only send it once
                pkt.retry_count = 1
                send(pkt, direct=True)
            except Exception as e:
                LOG.error(f"Failed to send beacon: {e}")
                client_factory.create().reset()
                time.sleep(5)

        self._loop_cnt += 1
        time.sleep(1)
        return True
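
The module above leans on the rush library for its rate limiting. As a point of reference, the same throttle construction can be exercised on its own; the sketch below is illustrative only (the 2-per-second quota and the slow_send function are invented here, whereas aprsd pulls its rates from CONF.msg_rate_limit_period and CONF.ack_rate_limit_period):

    import time

    from rush import quota, throttle
    from rush.contrib import decorator
    from rush.limiters import periodic
    from rush.stores import dictionary

    # Same construction as msg_t/ack_t above, but with a hard-coded
    # 2-calls-per-second quota instead of the config-driven values.
    t = throttle.Throttle(
        limiter=periodic.PeriodicLimiter(store=dictionary.DictionaryStore()),
        rate=quota.Quota.per_second(count=2),
    )
    throttled = decorator.ThrottleDecorator(throttle=t)

    @throttled.sleep_and_retry
    def slow_send(n):
        # Stand-in for the real client send; just records when it ran.
        print(f"send #{n} at {time.monotonic():.2f}")

    for i in range(5):
        slow_send(i)  # the third call in any one-second window sleeps

With sleep_and_retry the caller blocks until the quota frees up, which is why send() and _send_ack() above can simply be called in a loop without overrunning the APRS-IS rate limits.
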
aprsd/utils/__init__.py
ADDED
@@ -0,0 +1,163 @@
"""Utilities and helper functions."""

import errno
import functools
import os
import re
import sys
import traceback

import update_checker

import aprsd

from .fuzzyclock import fuzzy  # noqa: F401
# Make these available by anyone importing
# aprsd.utils
from .ring_buffer import RingBuffer  # noqa: F401


if sys.version_info.major == 3 and sys.version_info.minor >= 3:
    from collections.abc import MutableMapping
else:
    from collections.abc import MutableMapping


def singleton(cls):
    """Make a class a Singleton class (only one instance)"""
    @functools.wraps(cls)
    def wrapper_singleton(*args, **kwargs):
        if wrapper_singleton.instance is None:
            wrapper_singleton.instance = cls(*args, **kwargs)
        return wrapper_singleton.instance
    wrapper_singleton.instance = None
    return wrapper_singleton


def env(*vars, **kwargs):
    """This returns the first environment variable set.
    if none are non-empty, defaults to '' or keyword arg default
    """
    for v in vars:
        value = os.environ.get(v, None)
        if value:
            return value
    return kwargs.get("default", "")


def mkdir_p(path):
    """Make directory and have it work in py2 and py3."""
    try:
        os.makedirs(path)
    except OSError as exc:  # Python >= 2.5
        if exc.errno == errno.EEXIST and os.path.isdir(path):
            pass
        else:
            raise


def insert_str(string, str_to_insert, index):
    return string[:index] + str_to_insert + string[index:]


def end_substr(original, substr):
    """Get the index of the end of the <substr>.

    So you can insert a string after <substr>
    """
    idx = original.find(substr)
    if idx != -1:
        idx += len(substr)
    return idx


def rgb_from_name(name):
    """Create an rgb tuple from a string."""
    hash = 0
    for char in name:
        hash = ord(char) + ((hash << 5) - hash)
    red = hash & 255
    green = (hash >> 8) & 255
    blue = (hash >> 16) & 255
    return red, green, blue


def human_size(bytes, units=None):
    """Returns a human readable string representation of bytes"""
    if not units:
        units = [" bytes", "KB", "MB", "GB", "TB", "PB", "EB"]
    return str(bytes) + units[0] if bytes < 1024 else human_size(bytes >> 10, units[1:])


def strfdelta(tdelta, fmt="{hours:{width}}:{minutes:{width}}:{seconds:{width}}"):
    d = {
        "days": tdelta.days,
        "width": "02",
    }
    if tdelta.days > 0:
        fmt = "{days} days " + fmt

    d["hours"], rem = divmod(tdelta.seconds, 3600)
    d["minutes"], d["seconds"] = divmod(rem, 60)
    return fmt.format(**d)


def _check_version():
    # check for a newer version
    try:
        check = update_checker.UpdateChecker()
        result = check.check("aprsd", aprsd.__version__)
        if result:
            # Looks like there is an updated version.
            return 1, result
        else:
            return 0, "APRSD is up to date"
    except Exception:
        # probably can't get in touch with pypi for some reason
        # Lets put up an error and move on. We might not
        # have internet in this aprsd deployment.
        return 1, "Couldn't check for new version of APRSD"


def flatten_dict(d, parent_key="", sep="."):
    """Flatten a dict to key.key.key = value."""
    items = []
    for k, v in d.items():
        new_key = parent_key + sep + k if parent_key else k
        if isinstance(v, MutableMapping):
            items.extend(flatten_dict(v, new_key, sep=sep).items())
        else:
            items.append((new_key, v))
    return dict(items)


def parse_delta_str(s):
    if "day" in s:
        m = re.match(
            r"(?P<days>[-\d]+) day[s]*, (?P<hours>\d+):(?P<minutes>\d+):(?P<seconds>\d[\.\d+]*)",
            s,
        )
    else:
        m = re.match(r"(?P<hours>\d+):(?P<minutes>\d+):(?P<seconds>\d[\.\d+]*)", s)

    if m:
        return {key: float(val) for key, val in m.groupdict().items()}
    else:
        return {}


def load_entry_points(group):
    """Load all extensions registered to the given entry point group"""
    try:
        import importlib_metadata
    except ImportError:
        # For python 3.10 and later
        import importlib.metadata as importlib_metadata

    eps = importlib_metadata.entry_points(group=group)
    for ep in eps:
        try:
            ep.load()
        except Exception as e:
            print(f"Extension {ep.name} of group {group} failed to load with {e}", file=sys.stderr)
            print(traceback.format_exc(), file=sys.stderr)
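
A short usage sketch for a few of the helpers above (the AppState class and the sample dict are made up for illustration):

    from aprsd import utils

    @utils.singleton
    class AppState:
        # hypothetical class; the decorator works the same for any class
        def __init__(self):
            self.started = False

    assert AppState() is AppState()   # every call returns the one instance

    print(utils.human_size(5 * 1024 * 1024))
    # -> "5MB"
    print(utils.flatten_dict({"aprs": {"login": "N0CALL", "port": 14580}}))
    # -> {"aprs.login": "N0CALL", "aprs.port": 14580}
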
aprsd/utils/counter.py
ADDED
@@ -0,0 +1,51 @@
from multiprocessing import RawValue
import random
import threading

import wrapt


MAX_PACKET_ID = 9999


class PacketCounter:
    """
    Global Packet id counter class.

    This is a singleton based class that keeps
    an incrementing counter for all packets to
    be sent. All new Packet objects gets a new
    message id, which is the next number available
    from the PacketCounter.

    """

    _instance = None
    lock = threading.Lock()

    def __new__(cls, *args, **kwargs):
        """Make this a singleton class."""
        if cls._instance is None:
            cls._instance = super().__new__(cls, *args, **kwargs)
            cls._instance.val = RawValue("i", random.randint(1, MAX_PACKET_ID))
        return cls._instance

    @wrapt.synchronized(lock)
    def increment(self):
        if self.val.value == MAX_PACKET_ID:
            self.val.value = 1
        else:
            self.val.value += 1

    @property
    @wrapt.synchronized(lock)
    def value(self):
        return str(self.val.value)

    @wrapt.synchronized(lock)
    def __repr__(self):
        return str(self.val.value)

    @wrapt.synchronized(lock)
    def __str__(self):
        return str(self.val.value)
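
Usage sketch for the counter (the printed values depend on the random starting id, so they are not reproducible):

    from aprsd.utils.counter import PacketCounter

    c1 = PacketCounter()
    c2 = PacketCounter()
    assert c1 is c2          # __new__ hands back the same instance

    before = c1.value        # current id, returned as a string
    c1.increment()
    print(before, c2.value)  # c2 observes the increment as well
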
aprsd/utils/json.py
ADDED
@@ -0,0 +1,80 @@
import datetime
import decimal
import json
import sys

from aprsd.packets import core


class EnhancedJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            args = (
                "year", "month", "day", "hour", "minute",
                "second", "microsecond",
            )
            return {
                "__type__": "datetime.datetime",
                "args": [getattr(obj, a) for a in args],
            }
        elif isinstance(obj, datetime.date):
            args = ("year", "month", "day")
            return {
                "__type__": "datetime.date",
                "args": [getattr(obj, a) for a in args],
            }
        elif isinstance(obj, datetime.time):
            args = ("hour", "minute", "second", "microsecond")
            return {
                "__type__": "datetime.time",
                "args": [getattr(obj, a) for a in args],
            }
        elif isinstance(obj, datetime.timedelta):
            args = ("days", "seconds", "microseconds")
            return {
                "__type__": "datetime.timedelta",
                "args": [getattr(obj, a) for a in args],
            }
        elif isinstance(obj, decimal.Decimal):
            return {
                "__type__": "decimal.Decimal",
                "args": [str(obj)],
            }
        else:
            return super().default(obj)


class SimpleJSONEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        elif isinstance(obj, datetime.date):
            return str(obj)
        elif isinstance(obj, datetime.time):
            return str(obj)
        elif isinstance(obj, datetime.timedelta):
            return str(obj)
        elif isinstance(obj, decimal.Decimal):
            return str(obj)
        elif isinstance(obj, core.Packet):
            return obj.to_dict()
        else:
            return super().default(obj)


class EnhancedJSONDecoder(json.JSONDecoder):

    def __init__(self, *args, **kwargs):
        super().__init__(
            *args, object_hook=self.object_hook,
            **kwargs,
        )

    def object_hook(self, d):
        if "__type__" not in d:
            return d
        o = sys.modules[__name__]
        for e in d["__type__"].split("."):
            o = getattr(o, e)
        args, kwargs = d.get("args", ()), d.get("kwargs", {})
        return o(*args, **kwargs)
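
A round-trip sketch of the enhanced encoder/decoder pair (the payload is illustrative):

    import datetime
    import json

    from aprsd.utils.json import EnhancedJSONDecoder, EnhancedJSONEncoder

    payload = {"sent_at": datetime.datetime(2024, 1, 2, 3, 4, 5)}

    text = json.dumps(payload, cls=EnhancedJSONEncoder)
    # {"sent_at": {"__type__": "datetime.datetime", "args": [2024, 1, 2, 3, 4, 5, 0]}}

    restored = json.loads(text, cls=EnhancedJSONDecoder)
    assert restored["sent_at"] == payload["sent_at"]

The decoder's object_hook resolves the "__type__" path against names imported in this module, so the round trip works for the datetime, date, time, timedelta, and Decimal cases the encoder emits.
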
aprsd/utils/objectstore.py
ADDED
@@ -0,0 +1,123 @@
import logging
import os
import pathlib
import pickle
import threading

from oslo_config import cfg


CONF = cfg.CONF
LOG = logging.getLogger("APRSD")


class ObjectStoreMixin:
    """Class 'MIXIN' intended to save/load object data.

    The asumption of how this mixin is used:
      The using class has to have a:
         * data in self.data as a dictionary
         * a self.lock thread lock
         * Class must specify self.save_file as the location.


    When APRSD quits, it calls save()
    When APRSD Starts, it calls load()
    aprsd server -f (flush) will wipe all saved objects.
    """

    def __init__(self):
        self.lock = threading.RLock()

    def __len__(self):
        with self.lock:
            return len(self.data)

    def __iter__(self):
        with self.lock:
            return iter(self.data)

    def get_all(self):
        with self.lock:
            return self.data

    def get(self, key):
        with self.lock:
            return self.data.get(key)

    def copy(self):
        with self.lock:
            return self.data.copy()

    def _init_store(self):
        if not CONF.enable_save:
            return
        sl = CONF.save_location
        if not os.path.exists(sl):
            LOG.warning(f"Save location {sl} doesn't exist")
            try:
                os.makedirs(sl)
            except Exception as ex:
                LOG.exception(ex)

    def _save_filename(self):
        save_location = CONF.save_location

        return "{}/{}.p".format(
            save_location,
            self.__class__.__name__.lower(),
        )

    def save(self):
        """Save any queued to disk?"""
        if not CONF.enable_save:
            return
        self._init_store()
        save_filename = self._save_filename()
        if len(self) > 0:
            LOG.info(
                f"{self.__class__.__name__}::Saving"
                f" {len(self)} entries to disk at "
                f"{save_filename}",
            )
            with self.lock:
                with open(save_filename, "wb+") as fp:
                    pickle.dump(self.data, fp)
        else:
            LOG.debug(
                "{} Nothing to save, flushing old save file '{}'".format(
                    self.__class__.__name__,
                    save_filename,
                ),
            )
            self.flush()

    def load(self):
        if not CONF.enable_save:
            return
        if os.path.exists(self._save_filename()):
            try:
                with open(self._save_filename(), "rb") as fp:
                    raw = pickle.load(fp)
                    if raw:
                        self.data = raw
                        LOG.debug(
                            f"{self.__class__.__name__}::Loaded {len(self)} entries from disk.",
                        )
                    else:
                        LOG.debug(f"{self.__class__.__name__}::No data to load.")
            except (pickle.UnpicklingError, Exception) as ex:
                LOG.error(f"Failed to UnPickle {self._save_filename()}")
                LOG.error(ex)
                self.data = {}
        else:
            LOG.debug(f"{self.__class__.__name__}::No save file found.")

    def flush(self):
        """Nuke the old pickle file that stored the old results from last aprsd run."""
        if not CONF.enable_save:
            return
        if os.path.exists(self._save_filename()):
            pathlib.Path(self._save_filename()).unlink()
        with self.lock:
            self.data = {}
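
A minimal sketch of the mixin contract spelled out in the docstring; the SeenCalls class is hypothetical, and persistence only happens when CONF.enable_save is true and CONF.save_location points at a writable directory:

    from aprsd.utils.objectstore import ObjectStoreMixin

    class SeenCalls(ObjectStoreMixin):
        """Hypothetical consumer: maps callsign -> number of packets seen."""

        def __init__(self):
            super().__init__()   # sets up self.lock
            self.data = {}       # the mixin pickles whatever lives here

        def update(self, callsign):
            with self.lock:
                self.data[callsign] = self.data.get(callsign, 0) + 1

    store = SeenCalls()
    store.load()             # restore state from a previous run, if any
    store.update("N0CALL")
    store.save()             # writes <save_location>/seencalls.p
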