aprsd 3.4.3__py3-none-any.whl → 3.4.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -37,9 +37,10 @@ class PacketList(objectstore.ObjectStoreMixin):
37
37
  self._total_rx += 1
38
38
  self._add(packet)
39
39
  ptype = packet.__class__.__name__
40
- if ptype not in self.data["types"]:
41
- self.data["types"][ptype] = {"tx": 0, "rx": 0}
42
- self.data["types"][ptype]["rx"] += 1
40
+ type_stats = self.data["types"].setdefault(
41
+ ptype, {"tx": 0, "rx": 0},
42
+ )
43
+ type_stats["rx"] += 1
43
44
 
44
45
  def tx(self, packet: type[core.Packet]):
45
46
  """Add a packet that was received."""
@@ -47,9 +48,10 @@ class PacketList(objectstore.ObjectStoreMixin):
47
48
  self._total_tx += 1
48
49
  self._add(packet)
49
50
  ptype = packet.__class__.__name__
50
- if ptype not in self.data["types"]:
51
- self.data["types"][ptype] = {"tx": 0, "rx": 0}
52
- self.data["types"][ptype]["tx"] += 1
51
+ type_stats = self.data["types"].setdefault(
52
+ ptype, {"tx": 0, "rx": 0},
53
+ )
54
+ type_stats["tx"] += 1
53
55
 
54
56
  def add(self, packet):
55
57
  with self.lock:
@@ -81,28 +83,16 @@ class PacketList(objectstore.ObjectStoreMixin):
81
83
  return self._total_tx
82
84
 
83
85
  def stats(self, serializable=False) -> dict:
84
- # limit the number of packets to return to 50
85
86
  with self.lock:
86
- tmp = OrderedDict(
87
- reversed(
88
- list(
89
- self.data.get("packets", OrderedDict()).items(),
90
- ),
91
- ),
92
- )
93
- pkts = []
94
- count = 1
95
- for packet in tmp:
96
- pkts.append(tmp[packet])
97
- count += 1
98
- if count > CONF.packet_list_stats_maxlen:
99
- break
87
+ # Get last N packets directly using list slicing
88
+ packets_list = list(self.data.get("packets", {}).values())
89
+ pkts = packets_list[-CONF.packet_list_stats_maxlen:][::-1]
100
90
 
101
91
  stats = {
102
- "total_tracked": self._total_rx + self._total_rx,
92
+ "total_tracked": self._total_rx + self._total_tx, # Fixed typo: was rx + rx
103
93
  "rx": self._total_rx,
104
94
  "tx": self._total_tx,
105
- "types": self.data.get("types", []),
95
+ "types": self.data.get("types", {}), # Changed default from [] to {}
106
96
  "packet_count": len(self.data.get("packets", [])),
107
97
  "maxlen": self.maxlen,
108
98
  "packets": pkts,
aprsd/plugin.py CHANGED
@@ -470,9 +470,12 @@ class PluginManager:
470
470
  def reload_plugins(self):
471
471
  with self.lock:
472
472
  del self._pluggy_pm
473
- self.setup_plugins()
473
+ self.setup_plugins(load_help_plugin=CONF.load_help_plugin)
474
474
 
475
- def setup_plugins(self, load_help_plugin=True):
475
+ def setup_plugins(
476
+ self, load_help_plugin=True,
477
+ plugin_list=[],
478
+ ):
476
479
  """Create the plugin manager and register plugins."""
477
480
 
478
481
  LOG.info("Loading APRSD Plugins")
@@ -481,9 +484,13 @@ class PluginManager:
481
484
  _help = HelpPlugin()
482
485
  self._pluggy_pm.register(_help)
483
486
 
484
- enabled_plugins = CONF.enabled_plugins
485
- if enabled_plugins:
486
- for p_name in enabled_plugins:
487
+ # if plugin_list is passed in, only load
488
+ # those plugins.
489
+ if plugin_list:
490
+ for plugin_name in plugin_list:
491
+ self._load_plugin(plugin_name)
492
+ elif CONF.enabled_plugins:
493
+ for p_name in CONF.enabled_plugins:
487
494
  self._load_plugin(p_name)
488
495
  else:
489
496
  # Enabled plugins isn't set, so we default to loading all of
aprsd/plugins/email.py CHANGED
@@ -12,6 +12,7 @@ import imapclient
12
12
  from oslo_config import cfg
13
13
 
14
14
  from aprsd import packets, plugin, threads, utils
15
+ from aprsd.stats import collector
15
16
  from aprsd.threads import tx
16
17
  from aprsd.utils import trace
17
18
 
@@ -126,6 +127,11 @@ class EmailPlugin(plugin.APRSDRegexCommandPluginBase):
126
127
 
127
128
  shortcuts = _build_shortcuts_dict()
128
129
  LOG.info(f"Email shortcuts {shortcuts}")
130
+
131
+ # Register the EmailStats producer with the stats collector
132
+ # We do this here to prevent EmailStats from being registered
133
+ # when email is not enabled in the config file.
134
+ collector.Collector().register_producer(EmailStats)
129
135
  else:
130
136
  LOG.info("Email services not enabled.")
131
137
  self.enabled = False
aprsd/plugins/fortune.py CHANGED
@@ -8,7 +8,7 @@ from aprsd.utils import trace
8
8
 
9
9
  LOG = logging.getLogger("APRSD")
10
10
 
11
- DEFAULT_FORTUNE_PATH = '/usr/games/fortune'
11
+ DEFAULT_FORTUNE_PATH = "/usr/games/fortune"
12
12
 
13
13
 
14
14
  class FortunePlugin(plugin.APRSDRegexCommandPluginBase):
@@ -45,7 +45,7 @@ class FortunePlugin(plugin.APRSDRegexCommandPluginBase):
45
45
  command,
46
46
  shell=True,
47
47
  timeout=3,
48
- universal_newlines=True,
48
+ text=True,
49
49
  )
50
50
  output = (
51
51
  output.replace("\r", "")
aprsd/plugins/location.py CHANGED
@@ -2,8 +2,10 @@ import logging
2
2
  import re
3
3
  import time
4
4
 
5
- from geopy.geocoders import ArcGIS, AzureMaps, Baidu, Bing, GoogleV3
6
- from geopy.geocoders import HereV7, Nominatim, OpenCage, TomTom, What3WordsV3, Woosmap
5
+ from geopy.geocoders import (
6
+ ArcGIS, AzureMaps, Baidu, Bing, GoogleV3, HereV7, Nominatim, OpenCage,
7
+ TomTom, What3WordsV3, Woosmap,
8
+ )
7
9
  from oslo_config import cfg
8
10
 
9
11
  from aprsd import packets, plugin, plugin_utils
@@ -39,8 +41,8 @@ class USGov:
39
41
  result = plugin_utils.get_weather_gov_for_gps(lat, lon)
40
42
  # LOG.info(f"WEATHER: {result}")
41
43
  # LOG.info(f"area description {result['location']['areaDescription']}")
42
- if 'location' in result:
43
- loc = UsLocation(result['location']['areaDescription'])
44
+ if "location" in result:
45
+ loc = UsLocation(result["location"]["areaDescription"])
44
46
  else:
45
47
  loc = UsLocation("Unknown Location")
46
48
 
aprsd/stats/__init__.py CHANGED
@@ -1,7 +1,6 @@
1
1
  from aprsd import plugin
2
2
  from aprsd.client import stats as client_stats
3
3
  from aprsd.packets import packet_list, seen_list, tracker, watch_list
4
- from aprsd.plugins import email
5
4
  from aprsd.stats import app, collector
6
5
  from aprsd.threads import aprsd
7
6
 
@@ -15,6 +14,5 @@ stats_collector.register_producer(watch_list.WatchList)
15
14
  stats_collector.register_producer(tracker.PacketTrack)
16
15
  stats_collector.register_producer(plugin.PluginManager)
17
16
  stats_collector.register_producer(aprsd.APRSDThreadList)
18
- stats_collector.register_producer(email.EmailStats)
19
17
  stats_collector.register_producer(client_stats.APRSClientStats)
20
18
  stats_collector.register_producer(seen_list.SeenList)
aprsd/stats/collector.py CHANGED
@@ -10,7 +10,7 @@ LOG = logging.getLogger("APRSD")
10
10
  @runtime_checkable
11
11
  class StatsProducer(Protocol):
12
12
  """The StatsProducer protocol is used to define the interface for collecting stats."""
13
- def stats(self, serializeable=False) -> dict:
13
+ def stats(self, serializable=False) -> dict:
14
14
  """provide stats in a dictionary format."""
15
15
  ...
16
16
 
@@ -35,3 +35,8 @@ class Collector:
35
35
  if not isinstance(producer_name, StatsProducer):
36
36
  raise TypeError(f"Producer {producer_name} is not a StatsProducer")
37
37
  self.producers.append(producer_name)
38
+
39
+ def unregister_producer(self, producer_name: Callable):
40
+ if not isinstance(producer_name, StatsProducer):
41
+ raise TypeError(f"Producer {producer_name} is not a StatsProducer")
42
+ self.producers.remove(producer_name)
aprsd/threads/aprsd.py CHANGED
@@ -2,6 +2,7 @@ import abc
2
2
  import datetime
3
3
  import logging
4
4
  import threading
5
+ import time
5
6
  from typing import List
6
7
 
7
8
  import wrapt
@@ -14,6 +15,8 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
14
15
  """Base class for all threads in APRSD."""
15
16
 
16
17
  loop_count = 1
18
+ _pause = False
19
+ thread_stop = False
17
20
 
18
21
  def __init__(self, name):
19
22
  super().__init__(name=name)
@@ -26,7 +29,18 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
26
29
  if self.thread_stop:
27
30
  return True
28
31
 
32
+ def pause(self):
33
+ """Logically pause the processing of the main loop."""
34
+ LOG.debug(f"Pausing thread '{self.name}' loop_count {self.loop_count}")
35
+ self._pause = True
36
+
37
+ def unpause(self):
38
+ """Logically resume processing of the main loop."""
39
+ LOG.debug(f"Resuming thread '{self.name}' loop_count {self.loop_count}")
40
+ self._pause = False
41
+
29
42
  def stop(self):
43
+ LOG.debug(f"Stopping thread '{self.name}'")
30
44
  self.thread_stop = True
31
45
 
32
46
  @abc.abstractmethod
@@ -47,11 +61,14 @@ class APRSDThread(threading.Thread, metaclass=abc.ABCMeta):
47
61
  def run(self):
48
62
  LOG.debug("Starting")
49
63
  while not self._should_quit():
50
- self.loop_count += 1
51
- can_loop = self.loop()
52
- self._last_loop = datetime.datetime.now()
53
- if not can_loop:
54
- self.stop()
64
+ if self._pause:
65
+ time.sleep(1)
66
+ else:
67
+ self.loop_count += 1
68
+ can_loop = self.loop()
69
+ self._last_loop = datetime.datetime.now()
70
+ if not can_loop:
71
+ self.stop()
55
72
  self._cleanup()
56
73
  APRSDThreadList().remove(self)
57
74
  LOG.debug("Exiting")
@@ -71,6 +88,13 @@ class APRSDThreadList:
71
88
  cls.threads_list = []
72
89
  return cls._instance
73
90
 
91
+ def __contains__(self, name):
92
+ """See if we have a thread in our list"""
93
+ for t in self.threads_list:
94
+ if t.name == name:
95
+ return True
96
+ return False
97
+
74
98
  def stats(self, serializable=False) -> dict:
75
99
  stats = {}
76
100
  for th in self.threads_list:
@@ -103,6 +127,24 @@ class APRSDThreadList:
103
127
  LOG.info(F"{th.name} packet {th.packet}")
104
128
  th.stop()
105
129
 
130
+ @wrapt.synchronized
131
+ def pause_all(self):
132
+ """Iterate over all threads and pause them."""
133
+ for th in self.threads_list:
134
+ LOG.info(f"Pausing Thread {th.name}")
135
+ if hasattr(th, "packet"):
136
+ LOG.info(F"{th.name} packet {th.packet}")
137
+ th.pause()
138
+
139
+ @wrapt.synchronized
140
+ def unpause_all(self):
141
+ """Iterate over all threads and resume them."""
142
+ for th in self.threads_list:
143
+ LOG.info(f"Resuming Thread {th.name}")
144
+ if hasattr(th, "packet"):
145
+ LOG.info(F"{th.name} packet {th.packet}")
146
+ th.unpause()
147
+
106
148
  @wrapt.synchronized(lock)
107
149
  def info(self):
108
150
  """Go through all the threads and collect info about each."""
@@ -5,6 +5,7 @@ import tracemalloc
5
5
 
6
6
  from loguru import logger
7
7
  from oslo_config import cfg
8
+ import timeago
8
9
 
9
10
  from aprsd import packets, utils
10
11
  from aprsd.client import client_factory
@@ -98,6 +99,13 @@ class KeepAliveThread(APRSDThread):
98
99
 
99
100
  # check the APRS connection
100
101
  cl = client_factory.create()
102
+ cl_stats = cl.stats()
103
+ ka = cl_stats.get("connection_keepalive", None)
104
+ if ka:
105
+ keepalive = timeago.format(ka)
106
+ else:
107
+ keepalive = "N/A"
108
+ LOGU.opt(colors=True).info(f"<green>Client keepalive {keepalive}</green>")
101
109
  # Reset the connection if it's dead and this isn't our
102
110
  # First time through the loop.
103
111
  # The first time through the loop can happen at startup where
aprsd/threads/rx.py CHANGED
@@ -11,6 +11,7 @@ from aprsd.client import client_factory
11
11
  from aprsd.packets import collector
12
12
  from aprsd.packets import log as packet_log
13
13
  from aprsd.threads import APRSDThread, tx
14
+ from aprsd.utils import trace
14
15
 
15
16
 
16
17
  CONF = cfg.CONF
@@ -18,10 +19,11 @@ LOG = logging.getLogger("APRSD")
18
19
 
19
20
 
20
21
  class APRSDRXThread(APRSDThread):
22
+ _client = None
23
+
21
24
  def __init__(self, packet_queue):
22
25
  super().__init__("RX_PKT")
23
26
  self.packet_queue = packet_queue
24
- self._client = client_factory.create()
25
27
 
26
28
  def stop(self):
27
29
  self.thread_stop = True
@@ -33,6 +35,12 @@ class APRSDRXThread(APRSDThread):
33
35
  self._client = client_factory.create()
34
36
  time.sleep(1)
35
37
  return True
38
+
39
+ if not self._client.is_connected:
40
+ self._client = client_factory.create()
41
+ time.sleep(1)
42
+ return True
43
+
36
44
  # setup the consumer of messages and block until a messages
37
45
  try:
38
46
  # This will register a packet consumer with aprslib
@@ -63,6 +71,7 @@ class APRSDRXThread(APRSDThread):
63
71
  self._client.reset()
64
72
  time.sleep(5)
65
73
  # Continue to loop
74
+ time.sleep(1)
66
75
  return True
67
76
 
68
77
  def _process_packet(self, *args, **kwargs):
@@ -84,6 +93,7 @@ class APRSDDupeRXThread(APRSDRXThread):
84
93
  to be processed.
85
94
  """
86
95
 
96
+ @trace.trace
87
97
  def process_packet(self, *args, **kwargs):
88
98
  """This handles the processing of an inbound packet.
89
99
 
@@ -96,7 +106,6 @@ class APRSDDupeRXThread(APRSDRXThread):
96
106
  PluginProcessPacketThread for processing.
97
107
  """
98
108
  packet = self._client.decode_packet(*args, **kwargs)
99
- # LOG.debug(raw)
100
109
  packet_log.log(packet)
101
110
  pkt_list = packets.PacketList()
102
111
 
@@ -113,6 +122,12 @@ class APRSDDupeRXThread(APRSDRXThread):
113
122
  # Find the packet in the list of already seen packets
114
123
  # Based on the packet.key
115
124
  found = pkt_list.find(packet)
125
+ if not packet.msgNo:
126
+ # If the packet doesn't have a message id
127
+ # then there is no reliable way to detect
128
+ # if it's a dupe, so we just pass it on.
129
+ # it shouldn't get acked either.
130
+ found = False
116
131
  except KeyError:
117
132
  found = False
118
133
 
@@ -151,6 +166,11 @@ class APRSDProcessPacketThread(APRSDThread):
151
166
  def __init__(self, packet_queue):
152
167
  self.packet_queue = packet_queue
153
168
  super().__init__("ProcessPKT")
169
+ if not CONF.enable_sending_ack_packets:
170
+ LOG.warning(
171
+ "Sending ack packets is disabled, messages "
172
+ "will not be acknowledged.",
173
+ )
154
174
 
155
175
  def process_ack_packet(self, packet):
156
176
  """We got an ack for a message, no need to resend it."""
@@ -214,13 +234,14 @@ class APRSDProcessPacketThread(APRSDThread):
214
234
  # It's a MessagePacket and it's for us!
215
235
  # let any threads do their thing, then ack
216
236
  # send an ack last
217
- tx.send(
218
- packets.AckPacket(
219
- from_call=CONF.callsign,
220
- to_call=from_call,
221
- msgNo=msg_id,
222
- ),
223
- )
237
+ if msg_id:
238
+ tx.send(
239
+ packets.AckPacket(
240
+ from_call=CONF.callsign,
241
+ to_call=from_call,
242
+ msgNo=msg_id,
243
+ ),
244
+ )
224
245
 
225
246
  self.process_our_message_packet(packet)
226
247
  else:
@@ -240,7 +261,7 @@ class APRSDProcessPacketThread(APRSDThread):
240
261
  def process_other_packet(self, packet, for_us=False):
241
262
  """Process an APRS Packet that isn't a message or ack"""
242
263
  if not for_us:
243
- LOG.info("Got a packet not meant for us.")
264
+ LOG.info("Got a packet meant for someone else '{packet.to_call}'")
244
265
  else:
245
266
  LOG.info("Got a non AckPacket/MessagePacket")
246
267
 
@@ -328,8 +349,15 @@ class APRSDPluginProcessPacketThread(APRSDProcessPacketThread):
328
349
  # If the message was for us and we didn't have a
329
350
  # response, then we send a usage statement.
330
351
  if to_call == CONF.callsign and not replied:
331
- LOG.warning("Sending help!")
332
- message_text = "Unknown command! Send 'help' message for help"
352
+
353
+ # Tailor the messages accordingly
354
+ if CONF.load_help_plugin:
355
+ LOG.warning("Sending help!")
356
+ message_text = "Unknown command! Send 'help' message for help"
357
+ else:
358
+ LOG.warning("Unknown command!")
359
+ message_text = "Unknown command!"
360
+
333
361
  tx.send(
334
362
  packets.MessagePacket(
335
363
  from_call=CONF.callsign,
aprsd/threads/tx.py CHANGED
@@ -48,12 +48,15 @@ def send(packet: core.Packet, direct=False, aprs_client=None):
48
48
  """Send a packet either in a thread or directly to the client."""
49
49
  # prepare the packet for sending.
50
50
  # This constructs the packet.raw
51
- packet.prepare()
51
+ packet.prepare(create_msg_number=True)
52
52
  # Have to call the collector to track the packet
53
53
  # After prepare, as prepare assigns the msgNo
54
54
  collector.PacketCollector().tx(packet)
55
55
  if isinstance(packet, core.AckPacket):
56
- _send_ack(packet, direct=direct, aprs_client=aprs_client)
56
+ if CONF.enable_sending_ack_packets:
57
+ _send_ack(packet, direct=direct, aprs_client=aprs_client)
58
+ else:
59
+ LOG.info("Sending ack packets is disabled. Not sending AckPacket.")
57
60
  else:
58
61
  _send_packet(packet, direct=direct, aprs_client=aprs_client)
59
62
 
@@ -89,6 +92,9 @@ def _send_direct(packet, aprs_client=None):
89
92
  except Exception as e:
90
93
  LOG.error(f"Failed to send packet: {packet}")
91
94
  LOG.error(e)
95
+ return False
96
+ else:
97
+ return True
92
98
 
93
99
 
94
100
  class SendPacketThread(aprsd_threads.APRSDThread):
@@ -150,8 +156,17 @@ class SendPacketThread(aprsd_threads.APRSDThread):
150
156
  # no attempt time, so lets send it, and start
151
157
  # tracking the time.
152
158
  packet.last_send_time = int(round(time.time()))
153
- _send_direct(packet)
154
- packet.send_count += 1
159
+ sent = False
160
+ try:
161
+ sent = _send_direct(packet)
162
+ except Exception:
163
+ LOG.error(f"Failed to send packet: {packet}")
164
+ else:
165
+ # If an exception happens while sending
166
+ # we don't want this attempt to count
167
+ # against the packet
168
+ if sent:
169
+ packet.send_count += 1
155
170
 
156
171
  time.sleep(1)
157
172
  # Make sure we get called again.
@@ -199,8 +214,18 @@ class SendAckThread(aprsd_threads.APRSDThread):
199
214
  send_now = True
200
215
 
201
216
  if send_now:
202
- _send_direct(self.packet)
203
- self.packet.send_count += 1
217
+ sent = False
218
+ try:
219
+ sent = _send_direct(self.packet)
220
+ except Exception:
221
+ LOG.error(f"Failed to send packet: {self.packet}")
222
+ else:
223
+ # If an exception happens while sending
224
+ # we don't want this attempt to count
225
+ # against the packet
226
+ if sent:
227
+ self.packet.send_count += 1
228
+
204
229
  self.packet.last_send_time = int(round(time.time()))
205
230
 
206
231
  time.sleep(1)
aprsd/utils/__init__.py CHANGED
@@ -174,14 +174,29 @@ def load_entry_points(group):
174
174
  print(traceback.format_exc(), file=sys.stderr)
175
175
 
176
176
 
177
- def calculate_initial_compass_bearing(start, end):
178
- if (type(start) != tuple) or (type(end) != tuple): # noqa: E721
177
+ def calculate_initial_compass_bearing(point_a, point_b):
178
+ """
179
+ Calculates the bearing between two points.
180
+ The formulae used is the following:
181
+ θ = atan2(sin(Δlong).cos(lat2),
182
+ cos(lat1).sin(lat2) − sin(lat1).cos(lat2).cos(Δlong))
183
+ :Parameters:
184
+ - `pointA: The tuple representing the latitude/longitude for the
185
+ first point. Latitude and longitude must be in decimal degrees
186
+ - `pointB: The tuple representing the latitude/longitude for the
187
+ second point. Latitude and longitude must be in decimal degrees
188
+ :Returns:
189
+ The bearing in degrees
190
+ :Returns Type:
191
+ float
192
+ """
193
+ if (type(point_a) != tuple) or (type(point_b) != tuple): # noqa: E721
179
194
  raise TypeError("Only tuples are supported as arguments")
180
195
 
181
- lat1 = math.radians(float(start[0]))
182
- lat2 = math.radians(float(end[0]))
196
+ lat1 = math.radians(float(point_a[0]))
197
+ lat2 = math.radians(float(point_b[0]))
183
198
 
184
- diff_long = math.radians(float(end[1]) - float(start[1]))
199
+ diff_long = math.radians(float(point_b[1]) - float(point_a[1]))
185
200
 
186
201
  x = math.sin(diff_long) * math.cos(lat2)
187
202
  y = math.cos(lat1) * math.sin(lat2) - (
aprsd/utils/counter.py CHANGED
@@ -1,4 +1,3 @@
1
- from multiprocessing import RawValue
2
1
  import random
3
2
  import threading
4
3
 
@@ -10,12 +9,12 @@ MAX_PACKET_ID = 9999
10
9
 
11
10
  class PacketCounter:
12
11
  """
13
- Global Packet id counter class.
12
+ Global Packet ID counter class.
14
13
 
15
- This is a singleton based class that keeps
14
+ This is a singleton-based class that keeps
16
15
  an incrementing counter for all packets to
17
- be sent. All new Packet objects gets a new
18
- message id, which is the next number available
16
+ be sent. All new Packet objects get a new
17
+ message ID, which is the next number available
19
18
  from the PacketCounter.
20
19
 
21
20
  """
@@ -27,25 +26,29 @@ class PacketCounter:
27
26
  """Make this a singleton class."""
28
27
  if cls._instance is None:
29
28
  cls._instance = super().__new__(cls, *args, **kwargs)
30
- cls._instance.val = RawValue("i", random.randint(1, MAX_PACKET_ID))
29
+ cls._instance._val = random.randint(1, MAX_PACKET_ID) # Initialize counter
31
30
  return cls._instance
32
31
 
33
32
  @wrapt.synchronized(lock)
34
33
  def increment(self):
35
- if self.val.value == MAX_PACKET_ID:
36
- self.val.value = 1
34
+ """Increment the counter, reset if it exceeds MAX_PACKET_ID."""
35
+ if self._val == MAX_PACKET_ID:
36
+ self._val = 1
37
37
  else:
38
- self.val.value += 1
38
+ self._val += 1
39
39
 
40
40
  @property
41
41
  @wrapt.synchronized(lock)
42
42
  def value(self):
43
- return str(self.val.value)
43
+ """Get the current value as a string."""
44
+ return str(self._val)
44
45
 
45
46
  @wrapt.synchronized(lock)
46
47
  def __repr__(self):
47
- return str(self.val.value)
48
+ """String representation of the current value."""
49
+ return str(self._val)
48
50
 
49
51
  @wrapt.synchronized(lock)
50
52
  def __str__(self):
51
- return str(self.val.value)
53
+ """String representation of the current value."""
54
+ return str(self._val)
aprsd/utils/trace.py CHANGED
@@ -28,7 +28,8 @@ def trace(*dec_args, **dec_kwargs):
28
28
 
29
29
  def _decorator(f):
30
30
 
31
- func_name = f.__name__
31
+ func_name = f.__qualname__
32
+ func_file = "/".join(f.__code__.co_filename.split("/")[-4:])
32
33
 
33
34
  @functools.wraps(f)
34
35
  def trace_logging_wrapper(*args, **kwargs):
@@ -46,10 +47,11 @@ def trace(*dec_args, **dec_kwargs):
46
47
 
47
48
  if pass_filter:
48
49
  logger.debug(
49
- "==> %(func)s: call %(all_args)r",
50
+ "==> %(func)s: call %(all_args)r file: %(file)s",
50
51
  {
51
52
  "func": func_name,
52
53
  "all_args": str(all_args),
54
+ "file": func_file,
53
55
  },
54
56
  )
55
57