aprsd 1.0.0__py3-none-any.whl → 3.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. aprsd/__init__.py +6 -4
  2. aprsd/cli_helper.py +151 -0
  3. aprsd/client/__init__.py +13 -0
  4. aprsd/client/aprsis.py +132 -0
  5. aprsd/client/base.py +105 -0
  6. aprsd/client/drivers/__init__.py +0 -0
  7. aprsd/client/drivers/aprsis.py +224 -0
  8. aprsd/client/drivers/fake.py +73 -0
  9. aprsd/client/drivers/kiss.py +119 -0
  10. aprsd/client/factory.py +88 -0
  11. aprsd/client/fake.py +48 -0
  12. aprsd/client/kiss.py +103 -0
  13. aprsd/client/stats.py +38 -0
  14. aprsd/cmds/__init__.py +0 -0
  15. aprsd/cmds/completion.py +22 -0
  16. aprsd/cmds/dev.py +162 -0
  17. aprsd/cmds/fetch_stats.py +156 -0
  18. aprsd/cmds/healthcheck.py +86 -0
  19. aprsd/cmds/list_plugins.py +319 -0
  20. aprsd/cmds/listen.py +230 -0
  21. aprsd/cmds/send_message.py +174 -0
  22. aprsd/cmds/server.py +142 -0
  23. aprsd/cmds/webchat.py +681 -0
  24. aprsd/conf/__init__.py +56 -0
  25. aprsd/conf/client.py +131 -0
  26. aprsd/conf/common.py +302 -0
  27. aprsd/conf/log.py +65 -0
  28. aprsd/conf/opts.py +80 -0
  29. aprsd/conf/plugin_common.py +191 -0
  30. aprsd/conf/plugin_email.py +105 -0
  31. aprsd/exception.py +13 -0
  32. aprsd/log/__init__.py +0 -0
  33. aprsd/log/log.py +138 -0
  34. aprsd/main.py +104 -867
  35. aprsd/messaging.py +4 -0
  36. aprsd/packets/__init__.py +12 -0
  37. aprsd/packets/collector.py +56 -0
  38. aprsd/packets/core.py +823 -0
  39. aprsd/packets/log.py +143 -0
  40. aprsd/packets/packet_list.py +116 -0
  41. aprsd/packets/seen_list.py +54 -0
  42. aprsd/packets/tracker.py +109 -0
  43. aprsd/packets/watch_list.py +122 -0
  44. aprsd/plugin.py +475 -284
  45. aprsd/plugin_utils.py +86 -0
  46. aprsd/plugins/__init__.py +0 -0
  47. aprsd/plugins/email.py +709 -0
  48. aprsd/plugins/fortune.py +61 -0
  49. aprsd/plugins/location.py +179 -0
  50. aprsd/plugins/notify.py +61 -0
  51. aprsd/plugins/ping.py +31 -0
  52. aprsd/plugins/time.py +115 -0
  53. aprsd/plugins/version.py +31 -0
  54. aprsd/plugins/weather.py +405 -0
  55. aprsd/stats/__init__.py +20 -0
  56. aprsd/stats/app.py +49 -0
  57. aprsd/stats/collector.py +38 -0
  58. aprsd/threads/__init__.py +11 -0
  59. aprsd/threads/aprsd.py +119 -0
  60. aprsd/threads/keep_alive.py +124 -0
  61. aprsd/threads/log_monitor.py +121 -0
  62. aprsd/threads/registry.py +56 -0
  63. aprsd/threads/rx.py +354 -0
  64. aprsd/threads/stats.py +44 -0
  65. aprsd/threads/tx.py +255 -0
  66. aprsd/utils/__init__.py +163 -0
  67. aprsd/utils/counter.py +51 -0
  68. aprsd/utils/json.py +80 -0
  69. aprsd/utils/objectstore.py +123 -0
  70. aprsd/utils/ring_buffer.py +40 -0
  71. aprsd/utils/trace.py +180 -0
  72. aprsd/web/__init__.py +0 -0
  73. aprsd/web/admin/__init__.py +0 -0
  74. aprsd/web/admin/static/css/index.css +84 -0
  75. aprsd/web/admin/static/css/prism.css +4 -0
  76. aprsd/web/admin/static/css/tabs.css +35 -0
  77. aprsd/web/admin/static/images/Untitled.png +0 -0
  78. aprsd/web/admin/static/images/aprs-symbols-16-0.png +0 -0
  79. aprsd/web/admin/static/images/aprs-symbols-16-1.png +0 -0
  80. aprsd/web/admin/static/images/aprs-symbols-64-0.png +0 -0
  81. aprsd/web/admin/static/images/aprs-symbols-64-1.png +0 -0
  82. aprsd/web/admin/static/images/aprs-symbols-64-2.png +0 -0
  83. aprsd/web/admin/static/js/charts.js +235 -0
  84. aprsd/web/admin/static/js/echarts.js +465 -0
  85. aprsd/web/admin/static/js/logs.js +26 -0
  86. aprsd/web/admin/static/js/main.js +231 -0
  87. aprsd/web/admin/static/js/prism.js +12 -0
  88. aprsd/web/admin/static/js/send-message.js +114 -0
  89. aprsd/web/admin/static/js/tabs.js +28 -0
  90. aprsd/web/admin/templates/index.html +196 -0
  91. aprsd/web/chat/static/css/chat.css +115 -0
  92. aprsd/web/chat/static/css/index.css +66 -0
  93. aprsd/web/chat/static/css/style.css.map +1 -0
  94. aprsd/web/chat/static/css/tabs.css +41 -0
  95. aprsd/web/chat/static/css/upstream/bootstrap.min.css +6 -0
  96. aprsd/web/chat/static/css/upstream/font.woff2 +0 -0
  97. aprsd/web/chat/static/css/upstream/google-fonts.css +23 -0
  98. aprsd/web/chat/static/css/upstream/jquery-ui.css +1311 -0
  99. aprsd/web/chat/static/css/upstream/jquery.toast.css +28 -0
  100. aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/LatoLatin-Bold.woff +0 -0
  101. aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/LatoLatin-Bold.woff2 +0 -0
  102. aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/LatoLatin-Regular.woff +0 -0
  103. aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/LatoLatin-Regular.woff2 +0 -0
  104. aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/icons.woff +0 -0
  105. aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/icons.woff2 +0 -0
  106. aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/outline-icons.woff +0 -0
  107. aprsd/web/chat/static/css/upstream/themes/default/assets/fonts/outline-icons.woff2 +0 -0
  108. aprsd/web/chat/static/images/Untitled.png +0 -0
  109. aprsd/web/chat/static/images/aprs-symbols-16-0.png +0 -0
  110. aprsd/web/chat/static/images/aprs-symbols-16-1.png +0 -0
  111. aprsd/web/chat/static/images/aprs-symbols-64-0.png +0 -0
  112. aprsd/web/chat/static/images/aprs-symbols-64-1.png +0 -0
  113. aprsd/web/chat/static/images/aprs-symbols-64-2.png +0 -0
  114. aprsd/web/chat/static/images/globe.svg +3 -0
  115. aprsd/web/chat/static/js/gps.js +84 -0
  116. aprsd/web/chat/static/js/main.js +45 -0
  117. aprsd/web/chat/static/js/send-message.js +585 -0
  118. aprsd/web/chat/static/js/tabs.js +28 -0
  119. aprsd/web/chat/static/js/upstream/bootstrap.bundle.min.js +7 -0
  120. aprsd/web/chat/static/js/upstream/jquery-3.7.1.min.js +2 -0
  121. aprsd/web/chat/static/js/upstream/jquery-ui.min.js +13 -0
  122. aprsd/web/chat/static/js/upstream/jquery.toast.js +374 -0
  123. aprsd/web/chat/static/js/upstream/semantic.min.js +11 -0
  124. aprsd/web/chat/static/js/upstream/socket.io.min.js +7 -0
  125. aprsd/web/chat/templates/index.html +139 -0
  126. aprsd/wsgi.py +315 -0
  127. aprsd-3.4.1.dist-info/AUTHORS +13 -0
  128. aprsd-3.4.1.dist-info/LICENSE +175 -0
  129. aprsd-3.4.1.dist-info/METADATA +799 -0
  130. aprsd-3.4.1.dist-info/RECORD +134 -0
  131. {aprsd-1.0.0.dist-info → aprsd-3.4.1.dist-info}/WHEEL +1 -1
  132. aprsd-3.4.1.dist-info/entry_points.txt +8 -0
  133. aprsd/fake_aprs.py +0 -83
  134. aprsd/utils.py +0 -166
  135. aprsd-1.0.0.dist-info/AUTHORS +0 -6
  136. aprsd-1.0.0.dist-info/METADATA +0 -181
  137. aprsd-1.0.0.dist-info/RECORD +0 -13
  138. aprsd-1.0.0.dist-info/entry_points.txt +0 -4
  139. aprsd-1.0.0.dist-info/pbr.json +0 -1
  140. /aprsd/{fuzzyclock.py → utils/fuzzyclock.py} +0 -0
  141. {aprsd-1.0.0.dist-info → aprsd-3.4.1.dist-info}/top_level.txt +0 -0
aprsd/threads/keep_alive.py ADDED
@@ -0,0 +1,124 @@
1
+ import datetime
2
+ import logging
3
+ import time
4
+ import tracemalloc
5
+
6
+ from oslo_config import cfg
7
+
8
+ from aprsd import packets, utils
9
+ from aprsd.client import client_factory
10
+ from aprsd.log import log as aprsd_log
11
+ from aprsd.stats import collector
12
+ from aprsd.threads import APRSDThread, APRSDThreadList
13
+
14
+
15
+ CONF = cfg.CONF
16
+ LOG = logging.getLogger("APRSD")
17
+
18
+
19
+ class KeepAliveThread(APRSDThread):
20
+ cntr = 0
21
+ checker_time = datetime.datetime.now()
22
+
23
+ def __init__(self):
24
+ tracemalloc.start()
25
+ super().__init__("KeepAlive")
26
+ max_timeout = {"hours": 0.0, "minutes": 2, "seconds": 0}
27
+ self.max_delta = datetime.timedelta(**max_timeout)
28
+
29
+ def loop(self):
30
+ if self.loop_count % 60 == 0:
31
+ stats_json = collector.Collector().collect()
32
+ pl = packets.PacketList()
33
+ thread_list = APRSDThreadList()
34
+ now = datetime.datetime.now()
35
+
36
+ if "EmailStats" in stats_json:
37
+ email_stats = stats_json["EmailStats"]
38
+ if email_stats.get("last_check_time"):
39
+ email_thread_time = utils.strfdelta(now - email_stats["last_check_time"])
40
+ else:
41
+ email_thread_time = "N/A"
42
+ else:
43
+ email_thread_time = "N/A"
44
+
45
+ if "APRSClientStats" in stats_json and stats_json["APRSClientStats"].get("transport") == "aprsis":
46
+ if stats_json["APRSClientStats"].get("server_keepalive"):
47
+ last_msg_time = utils.strfdelta(now - stats_json["APRSClientStats"]["server_keepalive"])
48
+ else:
49
+ last_msg_time = "N/A"
50
+ else:
51
+ last_msg_time = "N/A"
52
+
53
+ tracked_packets = stats_json["PacketTrack"]["total_tracked"]
54
+ tx_msg = 0
55
+ rx_msg = 0
56
+ if "PacketList" in stats_json:
57
+ msg_packets = stats_json["PacketList"].get("MessagePacket")
58
+ if msg_packets:
59
+ tx_msg = msg_packets.get("tx", 0)
60
+ rx_msg = msg_packets.get("rx", 0)
61
+
62
+ keepalive = (
63
+ "{} - Uptime {} RX:{} TX:{} Tracker:{} Msgs TX:{} RX:{} "
64
+ "Last:{} Email: {} - RAM Current:{} Peak:{} Threads:{} LoggingQueue:{}"
65
+ ).format(
66
+ stats_json["APRSDStats"]["callsign"],
67
+ stats_json["APRSDStats"]["uptime"],
68
+ pl.total_rx(),
69
+ pl.total_tx(),
70
+ tracked_packets,
71
+ tx_msg,
72
+ rx_msg,
73
+ last_msg_time,
74
+ email_thread_time,
75
+ stats_json["APRSDStats"]["memory_current_str"],
76
+ stats_json["APRSDStats"]["memory_peak_str"],
77
+ len(thread_list),
78
+ aprsd_log.logging_queue.qsize(),
79
+ )
80
+ LOG.info(keepalive)
81
+ if "APRSDThreadList" in stats_json:
82
+ thread_list = stats_json["APRSDThreadList"]
83
+ for thread_name in thread_list:
84
+ thread = thread_list[thread_name]
85
+ alive = thread["alive"]
86
+ age = thread["age"]
87
+ key = thread["name"]
88
+ if not alive:
89
+ LOG.error(f"Thread {thread}")
90
+ LOG.info(f"{key: <15} Alive? {str(alive): <5} {str(age): <20}")
91
+
92
+ # check the APRS connection
93
+ cl = client_factory.create()
94
+ # Reset the connection if it's dead and this isn't our
95
+ # first time through the loop.
96
+ # The first time through the loop can happen at startup, where
97
+ # the keepalive thread starts before the client has had a chance
98
+ # to make its connection the first time.
99
+ if not cl.is_alive() and self.cntr > 0:
100
+ LOG.error(f"{cl.__class__.__name__} is not alive!!! Resetting")
101
+ client_factory.create().reset()
102
+ # else:
103
+ # # See if we should reset the aprs-is client
104
+ # # Due to losing a keepalive from them
105
+ # delta_dict = utils.parse_delta_str(last_msg_time)
106
+ # delta = datetime.timedelta(**delta_dict)
107
+ #
108
+ # if delta > self.max_delta:
109
+ # # We haven't gotten a keepalive from aprs-is in a while
110
+ # # reset the connection.
111
+ # if not client.KISSClient.is_enabled():
112
+ # LOG.warning(f"Resetting connection to APRS-IS {delta}")
113
+ # client.factory.create().reset()
114
+
115
+ # Check version every day
116
+ delta = now - self.checker_time
117
+ if delta > datetime.timedelta(hours=24):
118
+ self.checker_time = now
119
+ level, msg = utils._check_version()
120
+ if level:
121
+ LOG.warning(msg)
122
+ self.cntr += 1
123
+ time.sleep(1)
124
+ return True
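
KeepAliveThread leans on the APRSDThread run/loop contract: the base class (its implementation is not shown in this section) repeatedly calls loop(), tracks loop_count, and stops once loop() returns False. Because each pass sleeps for one second, the loop_count % 60 check above fires roughly once a minute. Below is a minimal, self-contained sketch of that assumed contract; the names LoopThread and EveryMinute are hypothetical and are not aprsd APIs.

import threading
import time


class LoopThread(threading.Thread):
    """Stand-in for the assumed APRSDThread contract: run() calls loop() until it returns False."""

    def __init__(self, name):
        super().__init__(name=name, daemon=True)
        self.loop_count = 0
        self._stop_event = threading.Event()

    def stop(self):
        self._stop_event.set()

    def loop(self):
        raise NotImplementedError

    def run(self):
        # Keep calling loop() until it returns False or stop() is requested.
        while not self._stop_event.is_set():
            self.loop_count += 1
            if not self.loop():
                break


class EveryMinute(LoopThread):
    def loop(self):
        if self.loop_count % 60 == 0:
            print("~60 one-second passes have elapsed")
        time.sleep(1)  # one-second cadence, mirroring KeepAliveThread.loop()
        return True    # keep the thread running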
aprsd/threads/log_monitor.py ADDED
@@ -0,0 +1,121 @@
1
+ import datetime
2
+ import logging
3
+ import threading
4
+
5
+ from oslo_config import cfg
6
+ import requests
7
+ import wrapt
8
+
9
+ from aprsd import threads
10
+ from aprsd.log import log
11
+
12
+
13
+ CONF = cfg.CONF
14
+ LOG = logging.getLogger("APRSD")
15
+
16
+
17
+ def send_log_entries(force=False):
18
+ """Send all of the log entries to the web interface."""
19
+ if CONF.admin.web_enabled:
20
+ if force or LogEntries().is_purge_ready():
21
+ entries = LogEntries().get_all_and_purge()
22
+ if entries:
23
+ try:
24
+ requests.post(
25
+ f"http://{CONF.admin.web_ip}:{CONF.admin.web_port}/log_entries",
26
+ json=entries,
27
+ auth=(CONF.admin.user, CONF.admin.password),
28
+ )
29
+ except Exception:
30
+ LOG.warning(f"Failed to send log entries. len={len(entries)}")
31
+
32
+
33
+ class LogEntries:
34
+ entries = []
35
+ lock = threading.Lock()
36
+ _instance = None
37
+ last_purge = datetime.datetime.now()
38
+ max_delta = datetime.timedelta(
39
+ hours=0.0, minutes=0, seconds=2,
40
+ )
41
+
42
+ def __new__(cls, *args, **kwargs):
43
+ if cls._instance is None:
44
+ cls._instance = super().__new__(cls)
45
+ return cls._instance
46
+
47
+ def stats(self) -> dict:
48
+ return {
49
+ "log_entries": self.entries,
50
+ }
51
+
52
+ @wrapt.synchronized(lock)
53
+ def add(self, entry):
54
+ self.entries.append(entry)
55
+
56
+ @wrapt.synchronized(lock)
57
+ def get_all_and_purge(self):
58
+ entries = self.entries.copy()
59
+ self.entries = []
60
+ self.last_purge = datetime.datetime.now()
61
+ return entries
62
+
63
+ def is_purge_ready(self):
64
+ now = datetime.datetime.now()
65
+ if (
66
+ now - self.last_purge > self.max_delta
67
+ and len(self.entries) > 1
68
+ ):
69
+ return True
70
+ return False
71
+
72
+ @wrapt.synchronized(lock)
73
+ def __len__(self):
74
+ return len(self.entries)
75
+
76
+
77
+ class LogMonitorThread(threads.APRSDThread):
78
+
79
+ def __init__(self):
80
+ super().__init__("LogMonitorThread")
81
+
82
+ def stop(self):
83
+ send_log_entries(force=True)
84
+ super().stop()
85
+
86
+ def loop(self):
87
+ try:
88
+ record = log.logging_queue.get(block=True, timeout=2)
89
+ if isinstance(record, list):
90
+ for item in record:
91
+ entry = self.json_record(item)
92
+ LogEntries().add(entry)
93
+ else:
94
+ entry = self.json_record(record)
95
+ LogEntries().add(entry)
96
+ except Exception:
97
+ # Just ignore this; most likely the queue read timed out.
98
+ pass
99
+
100
+ send_log_entries()
101
+ return True
102
+
103
+ def json_record(self, record):
104
+ entry = {}
105
+ entry["filename"] = record.filename
106
+ entry["funcName"] = record.funcName
107
+ entry["levelname"] = record.levelname
108
+ entry["lineno"] = record.lineno
109
+ entry["module"] = record.module
110
+ entry["name"] = record.name
111
+ entry["pathname"] = record.pathname
112
+ entry["process"] = record.process
113
+ entry["processName"] = record.processName
114
+ if hasattr(record, "stack_info"):
115
+ entry["stack_info"] = record.stack_info
116
+ else:
117
+ entry["stack_info"] = None
118
+ entry["thread"] = record.thread
119
+ entry["threadName"] = record.threadName
120
+ entry["message"] = record.getMessage()
121
+ return entry
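
LogEntries combines two patterns: a class-level singleton (overriding __new__) and shared class-level state guarded by wrapt.synchronized with a common lock, so every caller in the process appends to the same buffered list until send_log_entries() purges it. A stripped-down sketch of that combination follows; SharedEntries and _lock are hypothetical names used only for illustration.

import threading

import wrapt

_lock = threading.Lock()


class SharedEntries:
    entries = []       # class-level, so all instances share one buffer
    _instance = None

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    @wrapt.synchronized(_lock)
    def add(self, entry):
        self.entries.append(entry)

    @wrapt.synchronized(_lock)
    def get_all_and_purge(self):
        entries = self.entries.copy()
        self.entries = []   # rebind so later adds go into a fresh list
        return entries


if __name__ == "__main__":
    SharedEntries().add({"message": "hello"})
    # "Another" instance is the same object and sees the buffered entry.
    assert SharedEntries().get_all_and_purge() == [{"message": "hello"}]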
aprsd/threads/registry.py ADDED
@@ -0,0 +1,56 @@
1
+ import logging
2
+ import time
3
+
4
+ from oslo_config import cfg
5
+ import requests
6
+
7
+ import aprsd
8
+ from aprsd import threads as aprsd_threads
9
+
10
+
11
+ CONF = cfg.CONF
12
+ LOG = logging.getLogger("APRSD")
13
+
14
+
15
+ class APRSRegistryThread(aprsd_threads.APRSDThread):
16
+ """This sends service information to the configured APRS Registry."""
17
+ _loop_cnt: int = 1
18
+
19
+ def __init__(self):
20
+ super().__init__("APRSRegistryThread")
21
+ self._loop_cnt = 1
22
+ if not CONF.aprs_registry.enabled:
23
+ LOG.error(
24
+ "APRS Registry is not enabled. ",
25
+ )
26
+ LOG.error(
27
+ "APRS Registry thread is STOPPING.",
28
+ )
29
+ self.stop()
30
+ LOG.info(
31
+ "APRS Registry thread is running and will send "
32
+ f"info every {CONF.aprs_registry.frequency_seconds} seconds "
33
+ f"to {CONF.aprs_registry.registry_url}.",
34
+ )
35
+
36
+ def loop(self):
37
+ # Only call the registry every N seconds
38
+ if self._loop_cnt % CONF.aprs_registry.frequency_seconds == 0:
39
+ info = {
40
+ "callsign": CONF.callsign,
41
+ "description": CONF.aprs_registry.description,
42
+ "service_website": CONF.aprs_registry.service_website,
43
+ "software": f"APRSD version {aprsd.__version__} "
44
+ "https://github.com/craigerl/aprsd",
45
+ }
46
+ try:
47
+ requests.post(
48
+ f"{CONF.aprs_registry.registry_url}",
49
+ json=info,
50
+ )
51
+ except Exception as e:
52
+ LOG.error(f"Failed to send registry info: {e}")
53
+
54
+ time.sleep(1)
55
+ self._loop_cnt += 1
56
+ return True
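
The registry thread is essentially a periodic HTTP POST: once every CONF.aprs_registry.frequency_seconds it sends a small JSON document describing this APRSD instance. The sketch below shows the shape of that request with placeholder values standing in for the real configuration; the URL and callsign are not real defaults.

import requests

# Placeholder values; at runtime these come from CONF.aprs_registry.* and CONF.callsign.
registry_url = "https://example.org/api/v1/registry"
info = {
    "callsign": "N0CALL",
    "description": "My APRSD instance",
    "service_website": "https://example.org",
    "software": "APRSD version 3.4.1 https://github.com/craigerl/aprsd",
}

# Same shape of request APRSRegistryThread.loop() issues each reporting interval.
requests.post(registry_url, json=info, timeout=10)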
aprsd/threads/rx.py ADDED
@@ -0,0 +1,354 @@
1
+ import abc
2
+ import logging
3
+ import queue
4
+ import time
5
+
6
+ import aprslib
7
+ from oslo_config import cfg
8
+
9
+ from aprsd import packets, plugin
10
+ from aprsd.client import client_factory
11
+ from aprsd.packets import collector
12
+ from aprsd.packets import log as packet_log
13
+ from aprsd.threads import APRSDThread, tx
14
+
15
+
16
+ CONF = cfg.CONF
17
+ LOG = logging.getLogger("APRSD")
18
+
19
+
20
+ class APRSDRXThread(APRSDThread):
21
+ def __init__(self, packet_queue):
22
+ super().__init__("RX_PKT")
23
+ self.packet_queue = packet_queue
24
+ self._client = client_factory.create()
25
+
26
+ def stop(self):
27
+ self.thread_stop = True
28
+ if self._client:
29
+ self._client.stop()
30
+
31
+ def loop(self):
32
+ if not self._client:
33
+ self._client = client_factory.create()
34
+ time.sleep(1)
35
+ return True
36
+ # Set up the consumer of messages and block until a message arrives.
37
+ try:
38
+ # This will register a packet consumer with aprslib
39
+ # When new packets come in the consumer will process
40
+ # the packet
41
+
42
+ # Do a partial here because the consumer signature doesn't allow
43
+ # for kwargs to be passed in to the consumer func we declare,
44
+ # and the aprslib developer didn't want to allow a PR to add
45
+ # kwargs. :(
46
+ # https://github.com/rossengeorgiev/aprs-python/pull/56
47
+ self._client.consumer(
48
+ self._process_packet, raw=False, blocking=False,
49
+ )
50
+ except (
51
+ aprslib.exceptions.ConnectionDrop,
52
+ aprslib.exceptions.ConnectionError,
53
+ ):
54
+ LOG.error("Connection dropped, reconnecting")
55
+ # Force the deletion of the client object connected to aprs
56
+ # This will cause a reconnect, next time client.get_client()
57
+ # is called
58
+ self._client.reset()
59
+ time.sleep(5)
60
+ except Exception:
61
+ # LOG.exception(ex)
62
+ LOG.error("Resetting connection and trying again.")
63
+ self._client.reset()
64
+ time.sleep(5)
65
+ # Continue to loop
66
+ return True
67
+
68
+ def _process_packet(self, *args, **kwargs):
69
+ """Intermediate callback so we can update the keepalive time."""
70
+ # Now call the 'real' packet processing for an RX'd packet
71
+ self.process_packet(*args, **kwargs)
72
+
73
+ @abc.abstractmethod
74
+ def process_packet(self, *args, **kwargs):
75
+ pass
76
+
77
+
78
+ class APRSDDupeRXThread(APRSDRXThread):
79
+ """Process received packets.
80
+
81
+ This is the main APRSD Server command thread that
82
+ receives packets and makes sure the packet
83
+ hasn't been seen previously before sending it on
84
+ to be processed.
85
+ """
86
+
87
+ def process_packet(self, *args, **kwargs):
88
+ """This handles the processing of an inbound packet.
89
+
90
+ When a packet is received by the connected client object,
91
+ it sends the raw packet into this function. This function then
92
+ decodes the packet via the client, and then processes the packet.
93
+ Ack Packets are sent to the PluginProcessPacketThread for processing.
94
+ All other packets are checked against the list of already-seen packets,
95
+ and only when we haven't seen a packet before do we send it to the
96
+ PluginProcessPacketThread for processing.
97
+ """
98
+ packet = self._client.decode_packet(*args, **kwargs)
99
+ # LOG.debug(raw)
100
+ packet_log.log(packet)
101
+ pkt_list = packets.PacketList()
102
+
103
+ if isinstance(packet, packets.AckPacket):
104
+ # We don't need to drop AckPackets, those should be
105
+ # processed.
106
+ self.packet_queue.put(packet)
107
+ else:
108
+ # Make sure we aren't re-processing the same packet
109
+ # For RF based APRS Clients we can get duplicate packets
110
+ # So we need to track them and not process the dupes.
111
+ found = False
112
+ try:
113
+ # Find the packet in the list of already seen packets
114
+ # Based on the packet.key
115
+ found = pkt_list.find(packet)
116
+ except KeyError:
117
+ found = False
118
+
119
+ if not found:
120
+ # We haven't seen this packet before, so we process it.
121
+ collector.PacketCollector().rx(packet)
122
+ self.packet_queue.put(packet)
123
+ elif packet.timestamp - found.timestamp < CONF.packet_dupe_timeout:
124
+ # If the packet came in within N seconds of the
125
+ # last time we saw the packet, then we drop it as a dupe.
126
+ LOG.warning(f"Packet {packet.from_call}:{packet.msgNo} already tracked, dropping.")
127
+ else:
128
+ LOG.warning(
129
+ f"Packet {packet.from_call}:{packet.msgNo} already tracked "
130
+ f"but older than {CONF.packet_dupe_timeout} seconds. processing.",
131
+ )
132
+ collector.PacketCollector().rx(packet)
133
+ self.packet_queue.put(packet)
134
+
135
+
136
+ class APRSDPluginRXThread(APRSDDupeRXThread):
137
+ """"Process received packets.
138
+
139
+ For backwards compatibility, we keep the APRSDPluginRXThread.
140
+ """
141
+
142
+
143
+ class APRSDProcessPacketThread(APRSDThread):
144
+ """Base class for processing received packets.
145
+
146
+ This is the base class for processing packets coming from
147
+ the consumer. This base class handles sending ack packets and
148
+ will ack a message before sending the packet to the subclass
149
+ for processing."""
150
+
151
+ def __init__(self, packet_queue):
152
+ self.packet_queue = packet_queue
153
+ super().__init__("ProcessPKT")
154
+
155
+ def process_ack_packet(self, packet):
156
+ """We got an ack for a message, no need to resend it."""
157
+ ack_num = packet.msgNo
158
+ LOG.debug(f"Got ack for message {ack_num}")
159
+ collector.PacketCollector().rx(packet)
160
+
161
+ def process_piggyback_ack(self, packet):
162
+ """We got an ack embedded in a packet."""
163
+ ack_num = packet.ackMsgNo
164
+ LOG.debug(f"Got PiggyBackAck for message {ack_num}")
165
+ collector.PacketCollector().rx(packet)
166
+
167
+ def process_reject_packet(self, packet):
168
+ """We got a reject message for a packet. Stop sending the message."""
169
+ ack_num = packet.msgNo
170
+ LOG.debug(f"Got REJECT for message {ack_num}")
171
+ collector.PacketCollector().rx(packet)
172
+
173
+ def loop(self):
174
+ try:
175
+ packet = self.packet_queue.get(timeout=1)
176
+ if packet:
177
+ self.process_packet(packet)
178
+ except queue.Empty:
179
+ pass
180
+ return True
181
+
182
+ def process_packet(self, packet):
183
+ """Process a packet received from aprs-is server."""
184
+ LOG.debug(f"ProcessPKT-LOOP {self.loop_count}")
185
+ our_call = CONF.callsign.lower()
186
+
187
+ from_call = packet.from_call
188
+ if packet.addresse:
189
+ to_call = packet.addresse
190
+ else:
191
+ to_call = packet.to_call
192
+ msg_id = packet.msgNo
193
+
194
+ # We don't put ack packets destined for us through the
195
+ # plugins.
196
+ if (
197
+ isinstance(packet, packets.AckPacket)
198
+ and packet.addresse.lower() == our_call
199
+ ):
200
+ self.process_ack_packet(packet)
201
+ elif (
202
+ isinstance(packet, packets.RejectPacket)
203
+ and packet.addresse.lower() == our_call
204
+ ):
205
+ self.process_reject_packet(packet)
206
+ else:
207
+ if hasattr(packet, "ackMsgNo") and packet.ackMsgNo:
208
+ # we got an ack embedded in this packet
209
+ # we need to handle the ack
210
+ self.process_piggyback_ack(packet)
211
+ # Only ack messages that were sent directly to us
212
+ if isinstance(packet, packets.MessagePacket):
213
+ if to_call and to_call.lower() == our_call:
214
+ # It's a MessagePacket and it's for us!
215
+ # let any threads do their thing, then ack
216
+ # send an ack last
217
+ tx.send(
218
+ packets.AckPacket(
219
+ from_call=CONF.callsign,
220
+ to_call=from_call,
221
+ msgNo=msg_id,
222
+ ),
223
+ )
224
+
225
+ self.process_our_message_packet(packet)
226
+ else:
227
+ # Packet wasn't meant for us!
228
+ self.process_other_packet(packet, for_us=False)
229
+ else:
230
+ self.process_other_packet(
231
+ packet, for_us=(to_call.lower() == our_call),
232
+ )
233
+ LOG.debug(f"Packet processing complete for pkt '{packet.key}'")
234
+ return False
235
+
236
+ @abc.abstractmethod
237
+ def process_our_message_packet(self, packet):
238
+ """Process a MessagePacket destined for us!"""
239
+
240
+ def process_other_packet(self, packet, for_us=False):
241
+ """Process an APRS Packet that isn't a message or ack"""
242
+ if not for_us:
243
+ LOG.info("Got a packet not meant for us.")
244
+ else:
245
+ LOG.info("Got a non AckPacket/MessagePacket")
246
+
247
+
248
+ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
249
+ """Process the packet through the plugin manager.
250
+
251
+ This is the main aprsd server plugin processing thread."""
252
+
253
+ def process_other_packet(self, packet, for_us=False):
254
+ pm = plugin.PluginManager()
255
+ try:
256
+ results = pm.run_watchlist(packet)
257
+ for reply in results:
258
+ if isinstance(reply, list):
259
+ for subreply in reply:
260
+ LOG.debug(f"Sending '{subreply}'")
261
+ if isinstance(subreply, packets.Packet):
262
+ tx.send(subreply)
263
+ else:
264
+ wl = CONF.watch_list
265
+ to_call = wl["alert_callsign"]
266
+ tx.send(
267
+ packets.MessagePacket(
268
+ from_call=CONF.callsign,
269
+ to_call=to_call,
270
+ message_text=subreply,
271
+ ),
272
+ )
273
+ elif isinstance(reply, packets.Packet):
274
+ # We have a message based object.
275
+ tx.send(reply)
276
+ except Exception as ex:
277
+ LOG.error("Plugin failed!!!")
278
+ LOG.exception(ex)
279
+
280
+ def process_our_message_packet(self, packet):
281
+ """Send the packet through the plugins."""
282
+ from_call = packet.from_call
283
+ if packet.addresse:
284
+ to_call = packet.addresse
285
+ else:
286
+ to_call = None
287
+
288
+ pm = plugin.PluginManager()
289
+ try:
290
+ results = pm.run(packet)
291
+ replied = False
292
+ for reply in results:
293
+ if isinstance(reply, list):
294
+ # one of the plugins wants to send multiple messages
295
+ replied = True
296
+ for subreply in reply:
297
+ LOG.debug(f"Sending '{subreply}'")
298
+ if isinstance(subreply, packets.Packet):
299
+ tx.send(subreply)
300
+ else:
301
+ tx.send(
302
+ packets.MessagePacket(
303
+ from_call=CONF.callsign,
304
+ to_call=from_call,
305
+ message_text=subreply,
306
+ ),
307
+ )
308
+ elif isinstance(reply, packets.Packet):
309
+ # We have a message based object.
310
+ tx.send(reply)
311
+ replied = True
312
+ else:
313
+ replied = True
314
+ # A plugin can return a null message flag which signals
315
+ # us that they processed the message correctly, but have
316
+ # nothing to reply with, so we avoid replying with a
317
+ # usage string
318
+ if reply is not packets.NULL_MESSAGE:
319
+ LOG.debug(f"Sending '{reply}'")
320
+ tx.send(
321
+ packets.MessagePacket(
322
+ from_call=CONF.callsign,
323
+ to_call=from_call,
324
+ message_text=reply,
325
+ ),
326
+ )
327
+
328
+ # If the message was for us and we didn't have a
329
+ # response, then we send a usage statement.
330
+ if to_call == CONF.callsign and not replied:
331
+ LOG.warning("Sending help!")
332
+ message_text = "Unknown command! Send 'help' message for help"
333
+ tx.send(
334
+ packets.MessagePacket(
335
+ from_call=CONF.callsign,
336
+ to_call=from_call,
337
+ message_text=message_text,
338
+ ),
339
+ )
340
+ except Exception as ex:
341
+ LOG.error("Plugin failed!!!")
342
+ LOG.exception(ex)
343
+ # Do we need to send a reply?
344
+ if to_call == CONF.callsign:
345
+ reply = "A Plugin failed! try again?"
346
+ tx.send(
347
+ packets.MessagePacket(
348
+ from_call=CONF.callsign,
349
+ to_call=from_call,
350
+ message_text=reply,
351
+ ),
352
+ )
353
+
354
+ LOG.debug("Completed process_our_message_packet")
aprsd/threads/stats.py ADDED
@@ -0,0 +1,44 @@
1
+ import logging
2
+ import threading
3
+ import time
4
+
5
+ from oslo_config import cfg
6
+ import wrapt
7
+
8
+ from aprsd.stats import collector
9
+ from aprsd.threads import APRSDThread
10
+ from aprsd.utils import objectstore
11
+
12
+
13
+ CONF = cfg.CONF
14
+ LOG = logging.getLogger("APRSD")
15
+
16
+
17
+ class StatsStore(objectstore.ObjectStoreMixin):
18
+ """Container to save the stats from the collector."""
19
+ lock = threading.Lock()
20
+ data = {}
21
+
22
+ @wrapt.synchronized(lock)
23
+ def add(self, stats: dict):
24
+ self.data = stats
25
+
26
+
27
+ class APRSDStatsStoreThread(APRSDThread):
28
+ """Save APRSD Stats to disk periodically."""
29
+
30
+ # how often in seconds to write the file
31
+ save_interval = 10
32
+
33
+ def __init__(self):
34
+ super().__init__("StatsStore")
35
+
36
+ def loop(self):
37
+ if self.loop_count % self.save_interval == 0:
38
+ stats = collector.Collector().collect()
39
+ ss = StatsStore()
40
+ ss.add(stats)
41
+ ss.save()
42
+
43
+ time.sleep(1)
44
+ return True
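
APRSDStatsStoreThread applies the same loop_count pattern as the keep-alive thread: every save_interval one-second passes it snapshots the collector output and persists it through ObjectStoreMixin.save(). The on-disk format of ObjectStoreMixin is not part of this diff, so the sketch below illustrates the same periodic-snapshot idea with plain JSON and hypothetical file names.

import json
import pathlib
import tempfile
import time

SAVE_INTERVAL = 10  # passes between snapshots, mirroring save_interval above
STATS_FILE = pathlib.Path(tempfile.gettempdir()) / "aprsd_stats_example.json"


def collect_stats() -> dict:
    # Placeholder for collector.Collector().collect()
    return {"ts": time.time(), "threads": 3}


def snapshot_loop(passes: int = 30) -> None:
    for loop_count in range(1, passes + 1):
        if loop_count % SAVE_INTERVAL == 0:
            tmp = STATS_FILE.with_suffix(".tmp")
            tmp.write_text(json.dumps(collect_stats()))
            tmp.replace(STATS_FILE)  # atomic rename so readers never see a partial file
        time.sleep(1)


if __name__ == "__main__":
    snapshot_loop()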