fprime-gds 3.6.2a1__py3-none-any.whl → 4.0.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. fprime_gds/common/communication/adapters/uart.py +34 -25
  2. fprime_gds/common/decoders/ch_decoder.py +1 -1
  3. fprime_gds/common/decoders/event_decoder.py +2 -1
  4. fprime_gds/common/decoders/pkt_decoder.py +1 -1
  5. fprime_gds/common/distributor/distributor.py +2 -2
  6. fprime_gds/common/encoders/ch_encoder.py +2 -2
  7. fprime_gds/common/encoders/cmd_encoder.py +2 -2
  8. fprime_gds/common/encoders/event_encoder.py +2 -2
  9. fprime_gds/common/encoders/pkt_encoder.py +2 -2
  10. fprime_gds/common/encoders/seq_writer.py +2 -2
  11. fprime_gds/common/fpy/__init__.py +0 -0
  12. fprime_gds/common/fpy/serialize_bytecode.py +229 -0
  13. fprime_gds/common/fpy/types.py +203 -0
  14. fprime_gds/common/gds_cli/base_commands.py +1 -1
  15. fprime_gds/common/handlers.py +39 -0
  16. fprime_gds/common/loaders/fw_type_json_loader.py +54 -0
  17. fprime_gds/common/loaders/pkt_json_loader.py +121 -0
  18. fprime_gds/common/loaders/prm_json_loader.py +85 -0
  19. fprime_gds/common/pipeline/dictionaries.py +21 -4
  20. fprime_gds/common/pipeline/encoding.py +19 -0
  21. fprime_gds/common/pipeline/histories.py +4 -0
  22. fprime_gds/common/pipeline/standard.py +16 -2
  23. fprime_gds/common/templates/prm_template.py +81 -0
  24. fprime_gds/common/testing_fw/api.py +42 -0
  25. fprime_gds/common/testing_fw/pytest_integration.py +25 -2
  26. fprime_gds/common/tools/README.md +34 -0
  27. fprime_gds/common/tools/params.py +246 -0
  28. fprime_gds/common/utils/config_manager.py +6 -6
  29. fprime_gds/executables/apps.py +184 -11
  30. fprime_gds/executables/cli.py +443 -125
  31. fprime_gds/executables/comm.py +5 -2
  32. fprime_gds/executables/fprime_cli.py +3 -3
  33. fprime_gds/executables/run_deployment.py +12 -4
  34. fprime_gds/flask/static/js/vue-support/channel.js +1 -1
  35. fprime_gds/flask/static/js/vue-support/event.js +1 -1
  36. fprime_gds/plugin/definitions.py +86 -8
  37. fprime_gds/plugin/system.py +171 -58
  38. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/METADATA +18 -19
  39. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/RECORD +44 -35
  40. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/WHEEL +1 -1
  41. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/entry_points.txt +2 -0
  42. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info/licenses}/LICENSE.txt +0 -0
  43. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info/licenses}/NOTICE.txt +0 -0
  44. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/top_level.txt +0 -0
@@ -6,7 +6,10 @@ defines the "DataHandler" base class for handling data.
6
6
 
7
7
  @author mstarch
8
8
  """
9
+
9
10
  import abc
11
+ from typing import List, Type
12
+ from fprime_gds.plugin.definitions import gds_plugin_specification
10
13
 
11
14
 
12
15
  class DataHandler(abc.ABC):
@@ -27,6 +30,42 @@ class DataHandler(abc.ABC):
27
30
  """
28
31
 
29
32
 
33
class DataHandlerPlugin(DataHandler, abc.ABC):
    """Plugin class allowing for custom data handlers

    This class acts as a DataHandler class with the addition that it can be used as a plugin and thus self reports the
    data types it handles (whereas DataHandler leaves that up to the registration call). Users shall concretely subclass
    this class with their own data handling functionality.
    """

    @abc.abstractmethod
    def get_handled_descriptors(self) -> List[str]:
        """Return a list of data descriptor names this plugin handles"""
        raise NotImplementedError()

    @classmethod
    @gds_plugin_specification
    def register_data_handler_plugin(cls) -> Type["DataHandlerPlugin"]:
        """Register a plugin to provide post-decoding data handling capabilities

        Plugin hook for registering a plugin that supplies a DataHandler implementation. Implementors of this hook must
        return a non-abstract subclass of DataHandlerPlugin. This class will be provided as a data handler
        that is automatically enabled. Users may disable this via the command line. This data handler will be supplied
        all data types returned by the `get_handled_descriptors()` method.

        This DataHandler will run within the standard GDS (UI) process. Users wanting a separate process shall use a
        GdsApp plugin instead.

        Note: users should return the class, not an instance of the class. Needed arguments for instantiation are
        determined from class methods, solicited via the command line, and provided at construction time to the chosen
        instantiation.

        Returns:
            DataHandlerPlugin subclass (not instance)
        """
        raise NotImplementedError()
67
+
68
+
30
69
  class HandlerRegistrar(abc.ABC):
31
70
  """
32
71
  Defines a class that will take in registrants and remember them for calling back later. These objects should be of
@@ -0,0 +1,54 @@
1
+ """
2
+ fw_type_json_loader.py:
3
+
4
+ Loads flight dictionary (JSON) and returns name based Python dictionaries of Fw types
5
+
6
+ @author jawest
7
+ """
8
+
9
+ from fprime_gds.common.loaders.json_loader import JsonLoader
10
+ from fprime_gds.common.data_types.exceptions import GdsDictionaryParsingException
11
+
12
class FwTypeJsonLoader(JsonLoader):
    """Class to load python based Fw type dictionaries"""

    # JSON dictionary field containing the type definition entries
    TYPE_DEFINITIONS_FIELD = "typeDefinitions"

    def construct_dicts(self, _):
        """
        Constructs and returns python dictionaries keyed on id and name

        Args:
            _: Unused argument (inherited)
        Returns:
            A tuple with two Fw type dictionaries (python type dict):
            (id_dict, name_dict). The keys should be the type id and
            name fields respectively and the values should be type name
            strings. Note: An empty id dictionary is returned since there
            are no id fields in the Fw type alias JSON dictionary entries.
        Raises:
            GdsDictionaryParsingException: when the dictionary is missing the
                type definitions field or an entry is missing an expected key
        """
        id_dict = {}
        name_dict = {}

        if self.TYPE_DEFINITIONS_FIELD not in self.json_dict:
            raise GdsDictionaryParsingException(
                f"Ground Dictionary missing '{self.TYPE_DEFINITIONS_FIELD}' field: {str(self.json_file)}"
            )

        for type_def in self.json_dict[self.TYPE_DEFINITIONS_FIELD]:
            try:
                if type_def["kind"] == "alias":
                    name = str(type_def["qualifiedName"])
                    # Only consider names with the pattern Fw*Type
                    if name.startswith("Fw") and name.endswith("Type"):
                        name_dict[name] = type_def["underlyingType"]["name"]
            except KeyError as e:
                # Chain the KeyError so the missing key is preserved in the traceback
                raise GdsDictionaryParsingException(
                    f"{str(e)} key missing from Type Definition dictionary entry: {str(type_def)}"
                ) from e

        return (
            dict(sorted(id_dict.items())),
            dict(sorted(name_dict.items())),
            self.get_versions(),
        )
@@ -0,0 +1,121 @@
1
+ """
2
+ pkt_json_loader.py:
3
+
4
+ Loads flight dictionary (JSON) and returns Python dictionaries of telemetry packets
5
+
6
+ @author jawest
7
+ """
8
+
9
+ from fprime_gds.common.templates.pkt_template import PktTemplate
10
+ from fprime_gds.common.loaders.json_loader import JsonLoader
11
+ from fprime_gds.common.data_types.exceptions import GdsDictionaryParsingException
12
+
13
+
14
class PktJsonLoader(JsonLoader):
    """Class to load python based telemetry packet dictionaries"""

    PACKETS_FIELD = "telemetryPacketSets"

    SET_NAME = "name"
    MEMBERS = "members"

    def _get_dicts(self, path, packet_set_name: str, ch_name_dict: dict):
        """Return (id_dict, name_dict) for a packet set, using the per-path cache when warm

        Shared implementation behind get_id_dict and get_name_dict.
        """
        if path in self.saved_dicts and packet_set_name in self.saved_dicts[path]:
            (id_dict, name_dict) = self.saved_dicts[path][packet_set_name]
        else:
            (id_dict, name_dict, self.versions) = self.construct_dicts(packet_set_name, ch_name_dict)
            if path not in self.saved_dicts:
                self.saved_dicts[path] = dict()
            self.saved_dicts[path].update({packet_set_name: (id_dict, name_dict)})
        return id_dict, name_dict

    def get_id_dict(self, path, packet_set_name: str, ch_name_dict: dict):
        """Get the packet dictionary keyed on packet id for the given packet set"""
        id_dict, _ = self._get_dicts(path, packet_set_name, ch_name_dict)
        return id_dict

    def get_name_dict(self, path, packet_set_name: str, ch_name_dict: dict):
        """Get the packet dictionary keyed on packet name for the given packet set"""
        _, name_dict = self._get_dicts(path, packet_set_name, ch_name_dict)
        return name_dict

    def construct_dicts(self, packet_set_name: str, ch_name_dict: dict):
        """
        Constructs and returns python dictionaries keyed on id and name

        This function should not be called directly, instead, use
        get_id_dict(path) and get_name_dict(path)

        Args:
            packet_set_name: name of the telemetry packet set to load
            ch_name_dict (dict()): Channel dictionary with names as keys and
                ChTemplate objects as values.

        Returns:
            A tuple with two packet dictionaries (type==dict()):
            (id_dict, name_dict) and the dictionary version. The keys of the packet dictionaries should
            be the packets' id and name fields respectively and the values should be PktTemplate objects.
        Raises:
            GdsDictionaryParsingException: when the packets field or the requested
                packet set is missing, or an entry is missing an expected key
        """
        id_dict = {}
        name_dict = {}

        if self.PACKETS_FIELD not in self.json_dict:
            raise GdsDictionaryParsingException(
                f"Ground Dictionary missing '{self.PACKETS_FIELD}' field: {str(self.json_file)}"
            )

        for packet_dict in self.json_dict[self.PACKETS_FIELD]:
            try:
                if packet_set_name == packet_dict[self.SET_NAME]:
                    for packet_group_dict in packet_dict.get(self.MEMBERS, []):
                        packet_temp = self.construct_template_from_dict(packet_group_dict, ch_name_dict)
                        id_dict[packet_temp.get_id()] = packet_temp
                        name_dict[packet_temp.get_name()] = packet_temp

                    return (
                        dict(sorted(id_dict.items())),
                        dict(sorted(name_dict.items())),
                        self.get_versions(),
                    )

            except KeyError as e:
                raise GdsDictionaryParsingException(
                    f"{str(e)} key missing from telemetry packet dictionary entry: {str(packet_dict)}"
                ) from e

        raise GdsDictionaryParsingException(
            f"Ground Dictionary does not contain packet set '{packet_set_name}'"
        )

    def construct_template_from_dict(self, packet_group_dict: dict, ch_name_dict: dict):
        """
        Args:
            packet_group_dict (dict()): Packet group dictionary with group id, name, and members
            ch_name_dict (dict()): Channel dictionary with names as keys and ChTemplate objects as values.
        Returns:
            A PktTemplate object containing the packet group id, group name, and list of ChTemplate
            objects that represent each member in the packet.
        Raises:
            GdsDictionaryParsingException: when a group key is missing or a member
                is not a channel in the dictionary
        """
        try:
            group_name = packet_group_dict["name"]
            group_id = packet_group_dict["id"]
            # Look up each member channel by name; a missing channel is a dictionary error
            ch_list = [ch_name_dict[ch_name] for ch_name in packet_group_dict["members"]]
        except KeyError as e:
            # Report the whole group entry: formatting group_name here would raise
            # NameError when the "name" key itself is the one that is missing
            raise GdsDictionaryParsingException(
                f"{str(e)} key missing from telemetry packet member or member is not a channel in the dictionary: {str(packet_group_dict)}"
            ) from e

        return PktTemplate(
            group_id,
            group_name,
            ch_list
        )
@@ -0,0 +1,85 @@
1
+ """
2
+ prm_json_loader.py:
3
+
4
+ Loads flight dictionary (JSON) and returns id and mnemonic based Python dictionaries of params
5
+
6
+ @author zimri.leisher
7
+ """
8
+
9
+ from fprime_gds.common.templates.prm_template import PrmTemplate
10
+ from fprime_gds.common.loaders.json_loader import JsonLoader
11
+ from fprime_gds.common.data_types.exceptions import GdsDictionaryParsingException
12
+
13
+
14
class PrmJsonLoader(JsonLoader):
    """Class to load parameters from json dictionaries"""

    PARAMS_FIELD = "parameters"

    ID = "id"
    NAME = "name"
    TYPE = "type"
    DESC = "annotation"
    DEFAULT = "default"

    def construct_dicts(self, _):
        """
        Constructs and returns python dictionaries keyed on id and name

        Args:
            _: Unused argument (inherited)
        Returns:
            A tuple with two parameter dictionaries (python type dict):
            (id_dict, fqn_name_dict). The keys should be the parameters' id and
            fully qualified name fields respectively and the values should be
            PrmTemplate objects.
        Raises:
            GdsDictionaryParsingException: when the parameters field is missing
                or an entry is malformed
        """
        id_dict = {}
        fqn_name_dict = {}

        if self.PARAMS_FIELD not in self.json_dict:
            raise GdsDictionaryParsingException(
                f"Ground Dictionary missing '{self.PARAMS_FIELD}' field: {str(self.json_file)}"
            )

        for prm_dict in self.json_dict[self.PARAMS_FIELD]:
            # Create a parameter template object
            prm_temp = self.construct_template_from_dict(prm_dict)

            id_dict[prm_temp.get_id()] = prm_temp
            fqn_name_dict[prm_temp.get_full_name()] = prm_temp

        return (
            dict(sorted(id_dict.items())),
            dict(sorted(fqn_name_dict.items())),
            self.get_versions(),
        )

    def construct_template_from_dict(self, prm_dict: dict) -> PrmTemplate:
        """Build a PrmTemplate from a single JSON parameter dictionary entry

        Args:
            prm_dict: JSON dictionary entry describing one parameter
        Returns:
            PrmTemplate constructed from the entry
        Raises:
            GdsDictionaryParsingException: when the entry is missing keys or the
                name is not of the form '<QUAL_COMP_NAME>.<PRM_NAME>'
        """
        try:
            prm_id = prm_dict[self.ID]
            # The below assignment also raises a ValueError if the name does not contain a '.'
            qualified_component_name, prm_name = prm_dict[self.NAME].rsplit('.', 1)
            if not qualified_component_name or not prm_name:
                raise ValueError()

            type_obj = self.parse_type(prm_dict[self.TYPE])
        except ValueError as e:
            raise GdsDictionaryParsingException(
                f"Parameter dictionary entry malformed, expected name of the form '<QUAL_COMP_NAME>.<PRM_NAME>' in : {str(prm_dict)}"
            ) from e
        except KeyError as e:
            raise GdsDictionaryParsingException(
                f"{str(e)} key missing from parameter dictionary entry or its associated type in the dictionary: {str(prm_dict)}"
            ) from e

        prm_default_val = prm_dict.get(self.DEFAULT, None)

        return PrmTemplate(
            prm_id,
            prm_name,
            qualified_component_name,
            type_obj,
            prm_default_val
        )
@@ -14,6 +14,8 @@ from pathlib import Path
14
14
  import fprime_gds.common.loaders.ch_xml_loader
15
15
  import fprime_gds.common.loaders.cmd_xml_loader
16
16
  import fprime_gds.common.loaders.event_xml_loader
17
+ import fprime_gds.common.loaders.fw_type_json_loader
18
+ import fprime_gds.common.loaders.pkt_json_loader
17
19
  import fprime_gds.common.loaders.pkt_xml_loader
18
20
 
19
21
  # JSON Loaders
@@ -21,7 +23,6 @@ import fprime_gds.common.loaders.ch_json_loader
21
23
  import fprime_gds.common.loaders.cmd_json_loader
22
24
  import fprime_gds.common.loaders.event_json_loader
23
25
 
24
-
25
26
  class Dictionaries:
26
27
  """
27
28
  Dictionaries class to encapsulate the many different dictionaries used in the system. This includes the following
@@ -45,10 +46,11 @@ class Dictionaries:
45
46
  self._event_name_dict = None
46
47
  self._channel_name_dict = None
47
48
  self._packet_dict = None
49
+ self._fw_type_name_dict = None
48
50
  self._versions = None
49
51
  self._metadata = None
50
52
 
51
- def load_dictionaries(self, dictionary, packet_spec):
53
+ def load_dictionaries(self, dictionary, packet_spec, packet_set_name):
52
54
  """
53
55
  Loads the dictionaries based on the dictionary path supplied. Optional packet_spec is allowed to specify the
54
56
  definitions of packets.
@@ -75,6 +77,11 @@ class Dictionaries:
75
77
  )
76
78
  self._channel_name_dict = json_channel_loader.get_name_dict(None)
77
79
  self._channel_id_dict = json_channel_loader.get_id_dict(None)
80
+ # Fw Types
81
+ fw_types_loader = fprime_gds.common.loaders.fw_type_json_loader.FwTypeJsonLoader(
82
+ dictionary
83
+ )
84
+ self._fw_type_name_dict = fw_types_loader.get_name_dict(None)
78
85
  # Metadata
79
86
  self._versions = json_event_loader.get_versions()
80
87
  self._metadata = json_event_loader.get_metadata().copy()
@@ -121,8 +128,13 @@ class Dictionaries:
121
128
  msg = f"[ERROR] Dictionary '{dictionary}' does not exist."
122
129
  raise Exception(msg)
123
130
  # Check for packet specification
124
- if packet_spec is not None:
125
- packet_loader = fprime_gds.common.loaders.pkt_xml_loader.PktXmlLoader()
131
+ if self._metadata["dictionary_type"] == "json" and packet_set_name is not None:
132
+ packet_loader = fprime_gds.common.loaders.pkt_json_loader.PktJsonLoader(dictionary)
133
+ self._packet_dict = packet_loader.get_id_dict(
134
+ None, packet_set_name, self._channel_name_dict
135
+ )
136
+ elif packet_spec is not None:
137
+ packet_loader = fprime_gds.common.loaders.pkt_xml_loader.PktXmlLoader(dictionary)
126
138
  self._packet_dict = packet_loader.get_id_dict(
127
139
  packet_spec, self._channel_name_dict
128
140
  )
@@ -158,6 +170,11 @@ class Dictionaries:
158
170
  def channel_name(self):
159
171
  """Channel dictionary by name"""
160
172
  return self._channel_name_dict
173
+
174
+ @property
175
+ def fw_type_name(self):
176
+ """Fw type name dictionary by name"""
177
+ return self._fw_type_name_dict
161
178
 
162
179
  @property
163
180
  def project_version(self):
@@ -6,6 +6,7 @@ and decoding into a single component that the be composed into the standard pipe
6
6
 
7
7
  @mstarch
8
8
  """
9
+
9
10
  import fprime_gds.common.decoders.ch_decoder
10
11
  import fprime_gds.common.decoders.event_decoder
11
12
  import fprime_gds.common.decoders.file_decoder
@@ -130,6 +131,24 @@ class EncodingDecoding:
130
131
  """
131
132
  return self.channel_decoder.deregister(consumer)
132
133
 
134
+ def register_file_consumer(self, consumer):
135
+ """
136
+ Registers a consumer with the file decoder.
137
+
138
+ :param consumer: consumer of file packets
139
+ """
140
+ self.file_decoder.register(consumer)
141
+
142
+ def remove_file_consumer(self, consumer):
143
+ """
144
+ Removes a consumer from the file decoder. Will raise an error if the history was not
145
+ previously registered.
146
+
147
+ :param consumer: consumer of channels
148
+ :return: a boolean indicating if the consumer was removed.
149
+ """
150
+ return self.file_decoder.deregister(consumer)
151
+
133
152
  def register_command_consumer(self, consumer):
134
153
  """
135
154
  Registers a history with the standard pipeline.
@@ -6,6 +6,7 @@ to compose in this code.
6
6
 
7
7
  @author mstarch
8
8
  """
9
+
9
10
  from typing import Type
10
11
 
11
12
  from fprime_gds.common.history.history import History
@@ -37,6 +38,9 @@ class Histories:
37
38
  :param coders: coders object to register histories with
38
39
  """
39
40
  self.coders = coders
41
+ # Allow implementation type to disable histories
42
+ if self._implementation_type is None:
43
+ return
40
44
  # Create histories, RAM histories for now
41
45
  self.commands = self._implementation_type()
42
46
  self.events = self._implementation_type()
@@ -8,6 +8,7 @@ below.
8
8
 
9
9
  :author: lestarch
10
10
  """
11
+
11
12
  import datetime
12
13
  import os.path
13
14
  from pathlib import Path
@@ -19,6 +20,7 @@ import fprime_gds.common.data_types.cmd_data
19
20
  import fprime_gds.common.distributor.distributor
20
21
  import fprime_gds.common.logger.data_logger
21
22
  from fprime_gds.common.transport import RoutingTag, ThreadedTCPSocketClient
23
+ from fprime_gds.common.utils.config_manager import ConfigManager
22
24
 
23
25
  # Local imports for the sake of composition
24
26
  from . import dictionaries, encoding, files, histories
@@ -54,7 +56,13 @@ class StandardPipeline:
54
56
  self.__transport_type = ThreadedTCPSocketClient
55
57
 
56
58
  def setup(
57
- self, config, dictionary, file_store, logging_prefix=None, packet_spec=None
59
+ self,
60
+ config: ConfigManager,
61
+ dictionary,
62
+ file_store,
63
+ logging_prefix=None,
64
+ packet_spec=None,
65
+ packet_set_name=None,
58
66
  ):
59
67
  """
60
68
  Setup the standard pipeline for moving data from the middleware layer through the GDS layers using the standard
@@ -84,7 +92,13 @@ class StandardPipeline:
84
92
  self.distributor = fprime_gds.common.distributor.distributor.Distributor(config)
85
93
  self.client_socket = self.__transport_type()
86
94
  # Setup dictionaries encoders and decoders
87
- self.dictionaries.load_dictionaries(self.dictionary_path, packet_spec)
95
+ self.dictionaries.load_dictionaries(
96
+ self.dictionary_path, packet_spec, packet_set_name
97
+ )
98
+ # Update config to use Fw types defined in the JSON dictionary
99
+ if self.dictionaries.fw_type_name:
100
+ for fw_type_name, fw_type in self.dictionaries.fw_type_name.items():
101
+ config.set("types", fw_type_name, fw_type)
88
102
  self.coders.setup_coders(
89
103
  self.dictionaries, self.distributor, self.client_socket, config
90
104
  )
@@ -0,0 +1,81 @@
1
+ """
2
+ @brief Params Template class
3
+
4
+ Instances of this class describe a parameter of a component instance (not
5
+ including a specific value)
6
+
7
+ @date Created January 27, 2025
8
+ @author Zimri Leisher
9
+
10
+ @bug Hopefully none
11
+ """
12
+
13
+ from fprime.common.models.serialize.type_base import BaseType
14
+ from fprime.common.models.serialize.type_exceptions import TypeMismatchException
15
+
16
+ from . import data_template
17
+
18
+
19
class PrmTemplate(data_template.DataTemplate):
    """Class for param templates that describe parameters of component instances"""

    def __init__(
        self,
        prm_id: int,
        prm_name: str,
        comp_name: str,
        prm_type_obj: BaseType,
        prm_default_val,
    ):
        """
        Constructor

        Args:
            prm_id: the id of the parameter
            prm_name: the name of the parameter
            comp_name: the name of the component instance owning this parameter
            prm_type_obj: the BaseType subclass (a class object, not an instance)
                corresponding to the type of this parameter
            prm_default_val: the default value of this parameter, in raw JSON form
        Raises:
            TypeMismatchException: when an argument does not have the expected type
        """
        super().__init__()
        # Make sure correct types are passed
        if not isinstance(prm_id, int):
            raise TypeMismatchException(int, type(prm_id))

        if not isinstance(prm_name, str):
            raise TypeMismatchException(str, type(prm_name))

        if not isinstance(comp_name, str):
            raise TypeMismatchException(str, type(comp_name))

        # prm_type_obj must be a class: issubclass enforces that it is a BaseType subclass
        if not issubclass(prm_type_obj, BaseType):
            raise TypeMismatchException(BaseType, prm_type_obj)

        # prm_default_val is an arbitrary type, likely a primitive or dict

        self.prm_id = prm_id
        self.prm_name = prm_name
        self.comp_name = comp_name
        self.prm_type_obj = prm_type_obj
        self.prm_default_val = prm_default_val

    def get_full_name(self):
        """
        Get the full name of this param

        Returns:
            The full name (component.param) for this param
        """
        return f"{self.comp_name}.{self.prm_name}"

    def get_id(self):
        """Return the parameter id"""
        return self.prm_id

    def get_name(self):
        """Return the parameter name (without component qualification)"""
        return self.prm_name

    def get_comp_name(self):
        """Return the owning component instance name"""
        return self.comp_name

    def get_type_obj(self):
        """Return the BaseType subclass describing this parameter's type"""
        return self.prm_type_obj
@@ -9,6 +9,8 @@ telemetry and dictionaries.
9
9
  """
10
10
  import signal
11
11
  import time
12
+ from pathlib import Path
13
+ import shutil
12
14
 
13
15
  from fprime.common.models.serialize.time_type import TimeType
14
16
 
@@ -63,6 +65,15 @@ class IntegrationTestAPI(DataHandler):
63
65
  # Initialize the logger
64
66
  self.logger = TestLogger(logpath) if logpath is not None else None
65
67
 
68
+ # Copy dictionaries and binary file to output directory
69
+ if logpath is not None:
70
+ base_dir = Path(self.pipeline.dictionary_path).parents[1]
71
+ for subdir in ['bin', 'dict']:
72
+ dir_path = base_dir / subdir
73
+ if dir_path.is_dir():
74
+ shutil.copytree(dir_path, Path(logpath) / subdir,
75
+ dirs_exist_ok=True)
76
+
66
77
  # A predicate used as a filter to choose which events to log automatically
67
78
  self.event_log_filter = self.get_event_pred()
68
79
 
@@ -215,6 +226,37 @@ class IntegrationTestAPI(DataHandler):
215
226
  """
216
227
  self.event_log_filter = self.get_event_pred(event, args, severity, time_pred)
217
228
 
229
+ def get_deployment(self):
230
+ """
231
+ Get the deployment of the target using the loaded FSW dictionary path
232
+ Returns:
233
+ The name of the deployment (str)
234
+ """
235
+ return Path(self.pipeline.dictionary_path).parent.parent.name
236
+
237
+ def wait_for_dataflow(self, count=1, channels=None, start=None, timeout=120):
238
+ """
239
+ Wait for data flow by checking for any telemetry updates within a specified timeout.
240
+
241
+ Args:
242
+ count: either an exact amount (int) or a predicate to specify how many objects to find
243
+ channels: a channel specifier or list of channel specifiers (mnemonic, ID, or predicate). All will count if None
244
+ start: an optional index or predicate to specify the earliest item to search
245
+ timeout: the number of seconds to wait before terminating the search (int)
246
+ """
247
+ if start is None:
248
+ start = self.get_latest_time()
249
+
250
+ history = self.get_telemetry_subhistory()
251
+ result = self.await_telemetry_count(
252
+ count, channels=channels, history=history, start=start, timeout=timeout
253
+ )
254
+ if not result:
255
+ msg = f'Failed to detect any data flow for {timeout} s.'
256
+ self.__log(msg, TestLogger.RED)
257
+ assert False, msg
258
+ self.remove_telemetry_subhistory(history)
259
+
218
260
  ######################################################################################
219
261
  # History Functions
220
262
  ######################################################################################
@@ -15,7 +15,7 @@ Here a test (defined by starting the name with test_) uses the fprime_test_api f
15
15
  @author lestarch
16
16
  """
17
17
  import sys
18
-
18
+ from pathlib import Path
19
19
  import pytest
20
20
 
21
21
  from fprime_gds.common.testing_fw.api import IntegrationTestAPI
@@ -38,7 +38,30 @@ def pytest_addoption(parser):
38
38
  # Reduce flags to only the long option (i.e. --something) form
39
39
  flags = [flag for flag in flags if flag.startswith("--")]
40
40
  parser.addoption(*flags, **specifiers)
41
-
41
+
42
+ # Add an option to specify JUnit XML report file
43
+ parser.addoption(
44
+ "--junit-xml-file",
45
+ action="store",
46
+ default="report.xml",
47
+ help="File to store JUnit XML report. [default: %(default)s]",
48
+ )
49
+ # Add an option to enable JUnit XML report generation to a specified location
50
+ parser.addoption(
51
+ "--gen-junitxml",
52
+ action="store_true",
53
+ help="Enable JUnitXML report generation to a specified location"
54
+ )
55
+
56
def pytest_configure(config):
    """ This is a hook to allow plugins and conftest files to perform initial configuration

    This hook is called for every initial conftest file after command line options have been parsed. After that, the
    hook is called for other conftest files as they are registered.
    """
    # Route the JUnit XML report into the logs directory only when generation is enabled
    if not config.getoption("--gen-junitxml"):
        return
    report_name = config.getoption("--junit-xml-file")
    logs_dir = Path(config.getoption("--logs"))
    config.option.xmlpath = logs_dir / report_name
42
65
 
43
66
  @pytest.fixture(scope='session')
44
67
  def fprime_test_api_session(request):