fprime-gds 3.3.3__py3-none-any.whl → 3.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
fastentrypoints.py ADDED
@@ -0,0 +1,115 @@
+# noqa: D300,D400
+# Copyright (c) 2016, Aaron Christianson
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+Monkey patch setuptools to write faster console_scripts with this format:
+
+    import sys
+    from mymodule import entry_function
+    sys.exit(entry_function())
+
+This is better.
+
+(c) 2016, Aaron Christianson
+https://github.com/ninjaaron/fast-entry_points
+"""
+import re
+
+from setuptools.command import easy_install
+
+TEMPLATE = r"""
+# -*- coding: utf-8 -*-
+# EASY-INSTALL-ENTRY-SCRIPT: '{3}','{4}','{5}'
+__requires__ = '{3}'
+import re
+import sys
+
+from {0} import {1}
+
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
+    sys.exit({2}())
+""".lstrip()
+
+
+@classmethod
+def get_args(cls, dist, header=None):  # noqa: D205,D400
+    """
+    Yield write_script() argument tuples for a distribution's
+    console_scripts and gui_scripts entry points.
+    """
+    if header is None:
+        # pylint: disable=E1101
+        header = cls.get_header()
+    spec = str(dist.as_requirement())
+    for type_ in "console", "gui":
+        group = f'{type_}_scripts'
+        for name, ep in dist.get_entry_map(group).items():
+            # ensure_safe_name
+            if re.search(r"[\\/]", name):
+                raise ValueError("Path separators not allowed in script names")
+            script_text = TEMPLATE.format(
+                ep.module_name, ep.attrs[0], ".".join(ep.attrs), spec, group, name
+            )
+            # pylint: disable=E1101
+            args = cls._get_script_args(type_, name, header, script_text)
+            yield from args
+
+
+# pylint: disable=E1101
+easy_install.ScriptWriter.get_args = get_args
+
+
+def main():
+    import os
+    import shutil
+    import sys
+
+    dests = sys.argv[1:] or ["."]
+    filename = re.sub(r"\.pyc$", ".py", __file__)
+
+    for dst in dests:
+        shutil.copy(filename, dst)
+        manifest_path = os.path.join(dst, "MANIFEST.in")
+        setup_path = os.path.join(dst, "setup.py")
+
+        # Insert the include statement to MANIFEST.in if not present
+        with open(manifest_path, "a+") as manifest:
+            manifest.seek(0)
+            manifest_content = manifest.read()
+            if "include fastentrypoints.py" not in manifest_content:
+                manifest.write(
+                    ("\n" if manifest_content else "") + "include fastentrypoints.py"
+                )
+
+        # Insert the import statement to setup.py if not present
+        with open(setup_path, "a+") as setup:
+            setup.seek(0)
+            setup_content = setup.read()
+            if "import fastentrypoints" not in setup_content:
+                setup.seek(0)
+                setup.truncate()
+                setup.write("import fastentrypoints\n" + setup_content)
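
Note: for illustration, rendering TEMPLATE for a hypothetical entry point "mycli = mypkg.cli:main" in a package "mypkg==1.0" (names invented here) yields a console script of this shape, which imports only the one target module instead of pulling in pkg_resources:

    # -*- coding: utf-8 -*-
    # EASY-INSTALL-ENTRY-SCRIPT: 'mypkg==1.0','console_scripts','mycli'
    __requires__ = 'mypkg==1.0'
    import re
    import sys

    from mypkg.cli import main

    if __name__ == '__main__':
        sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
        sys.exit(main())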
@@ -40,13 +40,13 @@ class FramerDeframer(abc.ABC):
         """
         Deframes the incoming data from the specified format. Produces exactly one packet, and leftover bytes. Users
         wanting all packets to be deframed should call "deframe_all". If no full packet is available, this method
-        returns None. Expects incoming raw bytes to deframe, and returns a deframed packet or None, and the leftover
-        bytes that were unused. Will search and discard data up until a start token is found. Note: data will be
-        consumed up to the first start token found.
+        returns None. Expects incoming raw bytes to deframe, and returns a deframed packet or None, the leftover
+        bytes that were unused, and any bytes discarded from the existing data stream. Will search and discard data up
+        until a start token is found. Note: data will be consumed up to the first start token found.
 
         :param data: framed data bytes
         :param no_copy: (optional) will prevent extra copy if True, but "data" input will be destroyed.
-        :return: (packet as array of bytes or None, leftover bytes)
+        :return: (packet as array of bytes or None, leftover bytes, any discarded data)
         """
 
     def deframe_all(self, data, no_copy):
@@ -56,16 +56,18 @@ class FramerDeframer(abc.ABC):
 
         :param data: framed data bytes
         :param no_copy: (optional) will prevent extra copy if True, but "data" input will be destroyed.
-        :return:
+        :return: list of packets, remaining data, discarded/unframed/garbage data
         """
         packets = []
         if not no_copy:
            data = copy.copy(data)
+        discarded_aggregate = b""
        while True:
            # Deframe and return only on None
-            (packet, data) = self.deframe(data, no_copy=True)
+            (packet, data, discarded) = self.deframe(data, no_copy=True)
+            discarded_aggregate += discarded
            if packet is None:
-                return packets, data
+                return packets, data, discarded_aggregate
            packets.append(packet)
 
 
@@ -147,6 +149,7 @@ class FpFramerDeframer(FramerDeframer):
         :param no_copy: (optional) will prevent extra copy if True, but "data" input will be destroyed.
         :return: (packet as array of bytes or None, leftover bytes)
         """
+        discarded = b""
         if not no_copy:
             data = copy.copy(data)
         # Continue until there is not enough data for the header, or until a packet is found (return)
@@ -163,6 +166,7 @@ class FpFramerDeframer(FramerDeframer):
                 start != FpFramerDeframer.START_TOKEN
                 or data_size >= FpFramerDeframer.MAXIMUM_DATA_SIZE
             ):
+                discarded += data[0:1]
                 data = data[1:]
                 continue
             # If the pool is large enough to read the whole frame, then read it
@@ -175,17 +179,18 @@ class FpFramerDeframer(FramerDeframer):
                     data[: data_size + FpFramerDeframer.HEADER_SIZE]
                 ):
                     data = data[total_size:]
-                    return deframed, data
+                    return deframed, data, discarded
                 print(
                     "[WARNING] Checksum validation failed. Have you correctly set '--comm-checksum-type'",
                     file=sys.stderr,
                 )
                 # Bad checksum, rotate 1 and keep looking for non-garbage
+                discarded += data[0:1]
                 data = data[1:]
                 continue
             # Case of not enough data for a full packet, return hoping for more later
-            return None, data
-        return None, data
+            return None, data, discarded
+        return None, data, discarded
 
 
 class TcpServerFramerDeframer(FramerDeframer):
@@ -237,11 +242,11 @@ class TcpServerFramerDeframer(FramerDeframer):
             data = data[1:]
         # Break out of data when not enough
         if len(data) < 8:
-            return None, data
+            return None, data, b""
         # Read the length and break if not enough data
         (data_len,) = struct.unpack_from(">I", data, 4)
         if len(data) < data_len + 8:
-            return None, data
+            return None, data, b""
         packet = data[8 : data_len + 8]
         data = data[data_len + 8 :]
-        return packet, data
+        return packet, data, b""
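
Note: every deframer variant now returns a third element, so call sites that unpacked two values must unpack three. A minimal before/after sketch (the in-memory sink is illustrative, not part of the package):

    import io

    from fprime_gds.common.communication.framing import FpFramerDeframer

    deframer = FpFramerDeframer()
    pool, unframed_log = b"", io.BytesIO()

    # Before (3.3.3): packets, pool = deframer.deframe_all(pool, no_copy=True)
    packets, pool, discarded = deframer.deframe_all(pool, no_copy=True)
    if discarded:
        unframed_log.write(discarded)  # keep the garbage bytes for link debugging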
@@ -91,7 +91,7 @@ class TCPGround(GroundHandler):
         :return: list deframed packets
         """
         self.data += self.tcp.read()
-        (frames, self.data) = self.deframer.deframe_all(self.data, no_copy=True)
+        (frames, self.data, _) = self.deframer.deframe_all(self.data, no_copy=True)
         return frames
 
     def send_all(self, frames):
@@ -36,16 +36,23 @@ class Downlinker:
     """
 
     def __init__(
-        self, adapter: BaseAdapter, ground: GroundHandler, deframer: FramerDeframer
+        self,
+        adapter: BaseAdapter,
+        ground: GroundHandler,
+        deframer: FramerDeframer,
+        discarded=None,
     ):
         """Initialize the downlinker
 
-        Constructs a new downlinker object used to run the downlink and deframing operation.
+        Constructs a new downlinker object used to run the downlink and deframing operation. This downlinker will log
+        discarded (unframed) data when discarded is a writable data object. When discarded is None the discarded data is
+        dropped.
 
         Args:
             adapter: adapter used to read raw data from the hardware connection
             ground: handles the ground side connection
             deframer: deframer used to deframe data from the communication format
+            discarded: file to write discarded data to. None to drop the data.
         """
         self.running = True
         self.th_ground = None
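
Note: the new discarded parameter accepts anything exposing write() and flush(), so an in-memory buffer can stand in for a log file, e.g. in a test. A sketch (Downlinker wiring commented out since adapter/ground construction is omitted):

    import io

    sink = io.BytesIO()
    # downlinker = Downlinker(adapter, ground, FpFramerDeframer(), discarded=sink)
    sink.write(b"\xde\xad")  # what deframing() does with each discarded_data chunk
    sink.flush()
    assert sink.getvalue() == b"\xde\xad"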
@@ -54,13 +61,18 @@ class Downlinker:
         self.ground = ground
         self.deframer = deframer
         self.outgoing = Queue()
+        self.discarded = discarded
 
     def start(self):
         """Starts the downlink pipeline"""
-        self.th_ground = threading.Thread(target=self.sending, name="DownlinkTTSGroundThread")
+        self.th_ground = threading.Thread(
+            target=self.sending, name="DownlinkTTSGroundThread"
+        )
         self.th_ground.daemon = True
         self.th_ground.start()
-        self.th_data = threading.Thread(target=self.deframing, name="DownLinkDeframingThread")
+        self.th_data = threading.Thread(
+            target=self.deframing, name="DownLinkDeframingThread"
+        )
         self.th_data.daemon = True
         self.th_data.start()
 
@@ -74,12 +86,20 @@ class Downlinker:
         while self.running:
             # Blocks until data is available, but may still return b"" if timeout
             pool += self.adapter.read()
-            frames, pool = self.deframer.deframe_all(pool, no_copy=True)
+            frames, pool, discarded_data = self.deframer.deframe_all(pool, no_copy=True)
             try:
                 for frame in frames:
                     self.outgoing.put_nowait(frame)
             except Full:
                 DW_LOGGER.warning("GDS ground queue full, dropping frame")
+            try:
+                if self.discarded is not None:
+                    self.discarded.write(discarded_data)
+                    self.discarded.flush()
+            # Failure to write discarded data should never stop the GDS. Log it and move on.
+            except Exception as exc:
+                DW_LOGGER.warning("Cannot write discarded data %s", exc)
+                self.discarded = None  # Give up on logging further data
 
     def sending(self):
         """Outgoing stage of downlink
@@ -107,6 +127,7 @@ class Downlinker:
         for thread in [self.th_data, self.th_ground]:
             if thread is not None:
                 thread.join()
+        self.discarded = None
 
     def add_loopback_frame(self, frame):
         """Adds a frame to loopback to ground
@@ -52,7 +52,6 @@ class FileDownlinker(fprime_gds.common.handlers.DataHandler):
         self.sequence = 0  # Keeps track of what the current sequence ID should be
         self.timer = fprime_gds.common.files.helpers.Timeout()
         self.timer.setup(self.timeout, timeout)
-        os.makedirs(self.__directory, exist_ok=True)
 
     def data_callback(self, data, sender=None):
         """
@@ -96,7 +95,15 @@ class FileDownlinker(fprime_gds.common.handlers.DataHandler):
             size,
             self.__log_dir,
         )
-        self.active.open(TransmitFileState.WRITE)
+        try:
+            self.active.open(TransmitFileState.WRITE)
+        except PermissionError as exc:
+            self.state = FileStates.ERROR
+            LOGGER.warning(
+                "Unable to open file for writing. Discarding subsequent downlink packets. "
+                + str(exc)
+            )
+            return
         LOGGER.addHandler(self.active.log_handler)
         message = "Received START packet with metadata:\n"
         message += "\tSize: %d\n"
@@ -116,7 +123,10 @@ class FileDownlinker(fprime_gds.common.handlers.DataHandler):
         # Initialize all relevant DATA packet attributes into variables from file_data
         offset = data.offset
         if self.state != FileStates.RUNNING:
-            LOGGER.warning("Received unexpected data packet for offset: %d", offset)
+            # ERROR state means GDS is not ready to receive data packets (permission error)
+            # No need to log this, as it is already logged in handle_start and would only spam logs
+            if self.state != FileStates.ERROR:
+                LOGGER.warning("Received unexpected data packet for offset: %d", offset)
         else:
             if data.seqID != self.sequence:
                 LOGGER.warning(
@@ -156,9 +166,10 @@ class FileDownlinker(fprime_gds.common.handlers.DataHandler):
         """
         # Initialize all relevant END packet attributes into variables from file_data
         # hashValue attribute is not relevant right now, but might be in the future
-        if self.state != FileStates.RUNNING:
-            LOGGER.warning("Received unexpected END packet")
-        else:
+        if self.state == FileStates.ERROR:
+            LOGGER.info("Received END packet for discarded downlink")
+            self.finish()
+        elif self.state == FileStates.RUNNING:
             if data.seqID != self.sequence:
                 LOGGER.warning(
                     "End packet has unexpected sequence id. Expected: %d got %d",
@@ -167,6 +178,8 @@ class FileDownlinker(fprime_gds.common.handlers.DataHandler):
                 )
             LOGGER.info("Received END packet, finishing downlink")
             self.finish()
+        else:
+            LOGGER.warning("Received unexpected END packet")
 
     def timeout(self):
         """Timeout the current downlink"""
@@ -77,6 +77,7 @@ class FileStates(enum.Enum):
     RUNNING = 1
     CANCELED = 2
     END_WAIT = 3  # Waiting for the handshake for CANCEL or END packet
+    ERROR = 4
 
 
 class CFDPChecksum:
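
Note: condensing the three handler changes above, the END-packet dispatch over the extended enum can be summarized as follows (an illustrative sketch, not code from the package; the first enum member is not shown in the hunk and IDLE is assumed):

    from enum import Enum

    class FileStates(Enum):
        IDLE = 0  # assumed; not shown in the hunk above
        RUNNING = 1
        CANCELED = 2
        END_WAIT = 3
        ERROR = 4

    def on_end_packet(state):
        # Mirrors handle_end's branching after this change
        if state == FileStates.ERROR:
            return "finish the discarded downlink quietly"
        if state == FileStates.RUNNING:
            return "validate seqID, then finish"
        return "warn: unexpected END packet"

    assert on_end_packet(FileStates.ERROR) == "finish the discarded downlink quietly"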
@@ -117,23 +117,6 @@ class XmlLoader(dict_loader.DictLoader):
         # Parse xml and get element tree object we can retrieve data from
         element_tree = etree.parse(fd, parser=xml_parser)
         root = element_tree.getroot()
-
-        # Check version of the XML before continuing. Versions weren't published before 1.5.4. Only check major minor
-        # and point versions to allow for development versions to be allowed.
-        dict_version_string = root.attrib.get("framework_version", "1.5.4")
-        digits = []
-        # Process through the tokens of the version until we hit something that is not an int
-        for token in dict_version_string.split("."):
-            try:
-                digits.append(int(token))
-            except ValueError:
-                break
-        dict_version = tuple(digits)
-        if (
-            dict_version < MINIMUM_SUPPORTED_FRAMEWORK_VERSION
-            or dict_version > MAXIMUM_SUPPORTED_FRAMEWORK_VERSION
-        ):
-            raise UnsupportedDictionaryVersionException(dict_version)
         return root
 
     @staticmethod
@@ -401,22 +384,3 @@ class XmlLoader(dict_loader.DictLoader):
             raise exceptions.GseControllerParsingException(
                 msg
             )
-
-
-class UnsupportedDictionaryVersionException(Exception):
-    """Dictionary is of unsupported version"""
-
-    def __init__(self, version):
-        """Create a dictionary of a specific version"""
-
-        def pretty(version_tuple):
-            """Pretty print version"""
-            return ".".join([str(item) for item in version_tuple])
-
-        super().__init__(
-            "Dictionary version {} is not in supported range: {}-{}. Please upgrade fprime-gds.".format(
-                pretty(version),
-                pretty(MINIMUM_SUPPORTED_FRAMEWORK_VERSION),
-                pretty(MAXIMUM_SUPPORTED_FRAMEWORK_VERSION),
-            )
-        )
@@ -10,6 +10,8 @@ import logging
 import os
 import sys
 
+INITIALIZED = False
+
 
 def configure_py_log(directory=None, filename=sys.argv[0], mirror_to_stdout=False):
     """
@@ -21,7 +23,14 @@ def configure_py_log(directory=None, filename=sys.argv[0], mirror_to_stdout=False):
     :param mode: of file to write
     :param mirror_to_stdout: mirror the log output to standard our
     """
-    handlers = [logging.StreamHandler(sys.stdout)] if directory is None or mirror_to_stdout else []
+    global INITIALIZED
+    if INITIALIZED:
+        return
+    handlers = (
+        [logging.StreamHandler(sys.stdout)]
+        if directory is None or mirror_to_stdout
+        else []
+    )
     if directory is not None:
         log_file = os.path.join(directory, os.path.basename(filename))
         log_file = log_file if log_file.endswith(".log") else f"{log_file}.log"
@@ -33,4 +42,4 @@ def configure_py_log(directory=None, filename=sys.argv[0], mirror_to_stdout=False):
     logging.getLogger().addHandler(handler)
     logging.getLogger().setLevel(logging.INFO)
     logging.info("Logging system initialized!")
-
+    INITIALIZED = True
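
Note: with the module-level guard in place, calling the configuration routine twice no longer stacks duplicate handlers. A sketch (the import path is assumed from the package layout):

    from fprime_gds.common.logger import configure_py_log

    configure_py_log(directory=None, mirror_to_stdout=True)  # installs handlers once
    configure_py_log(directory=None, mirror_to_stdout=True)  # second call returns immediately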
@@ -7,6 +7,7 @@ communications layer.
7
7
 
8
8
  @author mstarch
9
9
  """
10
+ import os
10
11
  import fprime_gds.common.files.downlinker
11
12
  import fprime_gds.common.files.uplinker
12
13
 
@@ -27,7 +28,8 @@ class Filing:
         self, down_store, file_encoder, file_decoder, distributor, log_dir
     ):
         """
-        Sets up the file handling (uplink and downlink) from a pair of encoders and decoders
+        Sets up the file handling (uplink and downlink) from a pair of encoders and decoders.
+        Raises a PermissionError if the down_store is not writable.
 
         :param down_store: downlink storage directory
         :param file_encoder: file encoder for uplink
@@ -41,6 +43,11 @@ class Filing:
         )
         file_decoder.register(self.__downlinker)
         distributor.register("FW_PACKET_HAND", self.__uplinker)
+        if not os.access(down_store, os.W_OK):
+            raise PermissionError(
+                f"{down_store} is not writable. Downlinker will not be able to save files. "
+                "Fix permissions or change storage directory with --file-storage-directory."
+            )
 
     @property
     def uplinker(self):
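
Note: the effect is a fail-fast at GDS startup instead of a failure mid-downlink. A small demonstration of the underlying check (POSIX permissions assumed; running as root may bypass it):

    import os
    import stat
    import tempfile

    down_store = tempfile.mkdtemp()
    os.chmod(down_store, stat.S_IRUSR | stat.S_IXUSR)  # read/execute only, no write bit
    assert not os.access(down_store, os.W_OK)  # the condition Filing.__init__ now rejects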
@@ -226,12 +226,16 @@ class DetectionParser(ParserBase):
         if likely_deployment.exists():
             args.deployment = likely_deployment
             return args
-        child_directories = [child for child in detected_toolchain.iterdir() if child.is_dir()]
+        child_directories = [
+            child for child in detected_toolchain.iterdir() if child.is_dir()
+        ]
         if not child_directories:
             msg = f"No deployments found in {detected_toolchain}. Specify deployment with: --deployment"
             raise Exception(msg)
         # Works for the old structure where the bin, lib, and dict directories live immediately under the platform
-        elif len(child_directories) == 3 and set([path.name for path in child_directories]) == {"bin", "lib", "dict"}:
+        elif len(child_directories) == 3 and set(
+            [path.name for path in child_directories]
+        ) == {"bin", "lib", "dict"}:
             args.deployment = detected_toolchain
             return args
         elif len(child_directories) > 1:
@@ -310,8 +314,7 @@ class CommAdapterParser(ParserBase):
                 "action": "store",
                 "type": str,
                 "help": "Adapter for communicating to flight deployment. [default: %(default)s]",
-                "choices": ["none"]
-                + list(adapter_definition_dictionaries),
+                "choices": ["none"] + list(adapter_definition_dictionaries),
                 "default": "ip",
             },
             ("--comm-checksum-type",): {
@@ -326,6 +329,15 @@ class CommAdapterParser(ParserBase):
                 ],
                 "default": fprime_gds.common.communication.checksum.CHECKSUM_SELECTION,
             },
+            ("--output-unframed-data",): {
+                "dest": "output_unframed_data",
+                "action": "store",
+                "nargs": "?",
+                "help": "Log unframed data to supplied file relative to log directory. Use '-' for standard out.",
+                "default": None,
+                "const": "unframed.log",
+                "required": False,
+            },
         }
         return {**adapter_arguments, **com_arguments}
 
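Note: the nargs="?" plus const combination gives the new flag three modes. A standalone argparse sketch mirroring the definition above:

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--output-unframed-data",
        dest="output_unframed_data",
        action="store",
        nargs="?",
        default=None,
        const="unframed.log",
    )
    assert parser.parse_args([]).output_unframed_data is None  # logging disabled
    assert parser.parse_args(["--output-unframed-data"]).output_unframed_data == "unframed.log"
    assert parser.parse_args(["--output-unframed-data", "-"]).output_unframed_data == "-"  # stdout
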
@@ -699,9 +711,7 @@ class BinaryDeployment(DetectionParser):
         args.app = Path(args.app) if args.app else Path(find_app(args.deployment))
         if not args.app.is_file():
             msg = f"F prime binary '{args.app}' does not exist or is not a file"
-            raise ValueError(
-                msg
-            )
+            raise ValueError(msg)
         return args
 
 
@@ -728,7 +738,7 @@ class SearchArgumentsParser(ParserBase):
                 "action": "store",
                 "required": False,
                 "type": int,
-                "nargs":'+',
+                "nargs": "+",
                 "help": f"only show {self.command_name} matching the given type ID(s) 'ID'; can provide multiple IDs to show all given types",
                 "metavar": "ID",
             },
@@ -21,6 +21,8 @@ import logging
 import signal
 import sys
 
+from pathlib import Path
+
 # Required adapters built on standard tools
 try:
     from fprime_gds.common.zmq_transport import ZmqGround
@@ -84,34 +86,59 @@ def main():
     # Set the framing class used and pass it to the uplink and downlink component constructions giving each a separate
     # instantiation
     framer_class = FpFramerDeframer
-    LOGGER.info("Starting uplinker/downlinker connecting to FSW using %s with %s", adapter, framer_class.__name__)
-    downlinker = Downlinker(adapter, ground, framer_class())
-    uplinker = Uplinker(adapter, ground, framer_class(), downlinker)
-
-    # Open resources for the handlers on either side, this prepares the resources needed for reading/writing data
-    ground.open()
-    adapter.open()
-
-    # Finally start the processing of uplink and downlink
-    downlinker.start()
-    uplinker.start()
-    LOGGER.debug("Uplinker and downlinker running")
-
-    # Wait for shutdown event in the form of a KeyboardInterrupt then stop the processing, close resources, and wait for
-    # everything to terminate as expected.
-    def shutdown(*_):
-        """Shutdown function for signals"""
-        uplinker.stop()
-        downlinker.stop()
+    LOGGER.info(
+        "Starting uplinker/downlinker connecting to FSW using %s with %s",
+        adapter,
+        framer_class.__name__,
+    )
+    discarded_file_handle = None
+    try:
+        if args.output_unframed_data == "-":
+            discarded_file_handle = sys.stdout.buffer
+        elif args.output_unframed_data is not None:
+            discarded_file_handle_path = (
+                Path(args.logs) / Path(args.output_unframed_data)
+            ).resolve()
+            try:
+                discarded_file_handle = open(discarded_file_handle_path, "wb")
+                LOGGER.info("Logging unframed data to %s", discarded_file_handle_path)
+            except OSError:
+                LOGGER.warning(
+                    "Failed to open %s. Unframed data will be discarded.",
+                    discarded_file_handle_path,
+                )
+        downlinker = Downlinker(
+            adapter, ground, framer_class(), discarded=discarded_file_handle
+        )
+        uplinker = Uplinker(adapter, ground, framer_class(), downlinker)
+
+        # Open resources for the handlers on either side, this prepares the resources needed for reading/writing data
+        ground.open()
+        adapter.open()
+
+        # Finally start the processing of uplink and downlink
+        downlinker.start()
+        uplinker.start()
+        LOGGER.debug("Uplinker and downlinker running")
+
+        # Wait for shutdown event in the form of a KeyboardInterrupt then stop the processing, close resources, and wait for
+        # everything to terminate as expected.
+        def shutdown(*_):
+            """Shutdown function for signals"""
+            uplinker.stop()
+            downlinker.stop()
+            uplinker.join()
+            downlinker.join()
+            ground.close()
+            adapter.close()
+
+        signal.signal(signal.SIGTERM, shutdown)
+        signal.signal(signal.SIGINT, shutdown)
         uplinker.join()
         downlinker.join()
-    ground.close()
-    adapter.close()
-
-    signal.signal(signal.SIGTERM, shutdown)
-    signal.signal(signal.SIGINT, shutdown)
-    uplinker.join()
-    downlinker.join()
+    finally:
+        if discarded_file_handle is not None and args.output_unframed_data != "-":
+            discarded_file_handle.close()
     return 0
 
 
@@ -121,7 +121,18 @@ class FullListHistory extends ListHistory {
      * @param new_items: new items being to be process
      */
     send(new_items) {
-        this.store.splice(0, this.store.length, ...new_items);
+        // When the lists are not the same, update the stored list otherwise keep the list to prevent unnecessary bound
+        // data re-rendering.
+        if (this.store.length !== new_items.length) {
+            this.store.splice(0, this.store.length, ...new_items);
+            return;
+        }
+        for (let i = 0; i < Math.min(this.store.length, new_items.length); i++) {
+            if (this.store[i] !== new_items[i]) {
+                this.store.splice(0, this.store.length, ...new_items);
+                return;
+            }
+        }
     }
 }