fprime-gds 3.6.2a1__py3-none-any.whl → 4.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61) hide show
  1. fprime_gds/common/communication/adapters/ip.py +14 -9
  2. fprime_gds/common/communication/adapters/uart.py +34 -25
  3. fprime_gds/common/communication/ccsds/__init__.py +0 -0
  4. fprime_gds/common/communication/ccsds/apid.py +19 -0
  5. fprime_gds/common/communication/ccsds/chain.py +106 -0
  6. fprime_gds/common/communication/ccsds/space_data_link.py +196 -0
  7. fprime_gds/common/communication/ccsds/space_packet.py +129 -0
  8. fprime_gds/common/communication/framing.py +27 -32
  9. fprime_gds/common/decoders/ch_decoder.py +1 -1
  10. fprime_gds/common/decoders/event_decoder.py +9 -2
  11. fprime_gds/common/decoders/pkt_decoder.py +1 -1
  12. fprime_gds/common/distributor/distributor.py +6 -3
  13. fprime_gds/common/encoders/ch_encoder.py +2 -2
  14. fprime_gds/common/encoders/cmd_encoder.py +2 -2
  15. fprime_gds/common/encoders/event_encoder.py +2 -2
  16. fprime_gds/common/encoders/pkt_encoder.py +2 -2
  17. fprime_gds/common/encoders/seq_writer.py +2 -2
  18. fprime_gds/common/fpy/README.md +56 -0
  19. fprime_gds/common/fpy/SPEC.md +69 -0
  20. fprime_gds/common/fpy/__init__.py +0 -0
  21. fprime_gds/common/fpy/bytecode/__init__.py +0 -0
  22. fprime_gds/common/fpy/bytecode/directives.py +490 -0
  23. fprime_gds/common/fpy/codegen.py +1687 -0
  24. fprime_gds/common/fpy/grammar.lark +88 -0
  25. fprime_gds/common/fpy/main.py +40 -0
  26. fprime_gds/common/fpy/parser.py +239 -0
  27. fprime_gds/common/gds_cli/base_commands.py +1 -1
  28. fprime_gds/common/handlers.py +39 -0
  29. fprime_gds/common/loaders/fw_type_json_loader.py +54 -0
  30. fprime_gds/common/loaders/pkt_json_loader.py +125 -0
  31. fprime_gds/common/loaders/prm_json_loader.py +85 -0
  32. fprime_gds/common/logger/__init__.py +2 -2
  33. fprime_gds/common/pipeline/dictionaries.py +28 -2
  34. fprime_gds/common/pipeline/encoding.py +19 -0
  35. fprime_gds/common/pipeline/histories.py +4 -0
  36. fprime_gds/common/pipeline/standard.py +16 -2
  37. fprime_gds/common/templates/cmd_template.py +8 -0
  38. fprime_gds/common/templates/prm_template.py +81 -0
  39. fprime_gds/common/testing_fw/api.py +148 -1
  40. fprime_gds/common/testing_fw/pytest_integration.py +37 -3
  41. fprime_gds/common/tools/README.md +34 -0
  42. fprime_gds/common/tools/params.py +246 -0
  43. fprime_gds/common/utils/config_manager.py +6 -6
  44. fprime_gds/common/utils/data_desc_type.py +6 -1
  45. fprime_gds/executables/apps.py +189 -11
  46. fprime_gds/executables/cli.py +468 -127
  47. fprime_gds/executables/comm.py +5 -2
  48. fprime_gds/executables/data_product_writer.py +164 -165
  49. fprime_gds/executables/fprime_cli.py +3 -3
  50. fprime_gds/executables/run_deployment.py +13 -5
  51. fprime_gds/flask/static/js/vue-support/channel.js +1 -1
  52. fprime_gds/flask/static/js/vue-support/event.js +1 -1
  53. fprime_gds/plugin/definitions.py +86 -8
  54. fprime_gds/plugin/system.py +172 -58
  55. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/METADATA +23 -21
  56. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/RECORD +61 -41
  57. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/WHEEL +1 -1
  58. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/entry_points.txt +2 -0
  59. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info/licenses}/LICENSE.txt +0 -0
  60. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info/licenses}/NOTICE.txt +0 -0
  61. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/top_level.txt +0 -0
@@ -13,7 +13,7 @@ import sys
13
13
  INITIALIZED = False
14
14
 
15
15
 
16
- def configure_py_log(directory=None, filename=sys.argv[0], mirror_to_stdout=False):
16
+ def configure_py_log(directory=None, filename=sys.argv[0], mirror_to_stdout=False, log_level="INFO"):
17
17
  """
18
18
  Configure the python logging. If logdir is supplied, our logs will go in that directory as a log file. Otherwise,
19
19
  logs will go to the CLI.
@@ -40,6 +40,6 @@ def configure_py_log(directory=None, filename=sys.argv[0], mirror_to_stdout=Fals
40
40
  for handler in handlers:
41
41
  handler.setFormatter(formatter)
42
42
  logging.getLogger().addHandler(handler)
43
- logging.getLogger().setLevel(logging.INFO)
43
+ logging.getLogger().setLevel(log_level)
44
44
  logging.info("Logging system initialized!")
45
45
  INITIALIZED = True
@@ -14,6 +14,8 @@ from pathlib import Path
14
14
  import fprime_gds.common.loaders.ch_xml_loader
15
15
  import fprime_gds.common.loaders.cmd_xml_loader
16
16
  import fprime_gds.common.loaders.event_xml_loader
17
+ import fprime_gds.common.loaders.fw_type_json_loader
18
+ import fprime_gds.common.loaders.pkt_json_loader
17
19
  import fprime_gds.common.loaders.pkt_xml_loader
18
20
 
19
21
  # JSON Loaders
@@ -21,7 +23,6 @@ import fprime_gds.common.loaders.ch_json_loader
21
23
  import fprime_gds.common.loaders.cmd_json_loader
22
24
  import fprime_gds.common.loaders.event_json_loader
23
25
 
24
-
25
26
  class Dictionaries:
26
27
  """
27
28
  Dictionaries class to encapsulate the many different dictionaries used in the system. This includes the following
@@ -45,10 +46,11 @@ class Dictionaries:
45
46
  self._event_name_dict = None
46
47
  self._channel_name_dict = None
47
48
  self._packet_dict = None
49
+ self._fw_type_name_dict = None
48
50
  self._versions = None
49
51
  self._metadata = None
50
52
 
51
- def load_dictionaries(self, dictionary, packet_spec):
53
+ def load_dictionaries(self, dictionary, packet_spec, packet_set_name):
52
54
  """
53
55
  Loads the dictionaries based on the dictionary path supplied. Optional packet_spec is allowed to specify the
54
56
  definitions of packets.
@@ -75,6 +77,11 @@ class Dictionaries:
75
77
  )
76
78
  self._channel_name_dict = json_channel_loader.get_name_dict(None)
77
79
  self._channel_id_dict = json_channel_loader.get_id_dict(None)
80
+ # Fw Types
81
+ fw_types_loader = fprime_gds.common.loaders.fw_type_json_loader.FwTypeJsonLoader(
82
+ dictionary
83
+ )
84
+ self._fw_type_name_dict = fw_types_loader.get_name_dict(None)
78
85
  # Metadata
79
86
  self._versions = json_event_loader.get_versions()
80
87
  self._metadata = json_event_loader.get_metadata().copy()
@@ -126,6 +133,20 @@ class Dictionaries:
126
133
  self._packet_dict = packet_loader.get_id_dict(
127
134
  packet_spec, self._channel_name_dict
128
135
  )
136
+ # Otherwise use JSON dictionary to attempt automatic packet loading
137
+ elif self._metadata["dictionary_type"] == "json":
138
+ packet_loader = fprime_gds.common.loaders.pkt_json_loader.PktJsonLoader(dictionary)
139
+ if packet_set_name is None:
140
+ names = packet_loader.get_packet_set_names(None)
141
+ if len(names) == 0:
142
+ self._packet_dict = None
143
+ return
144
+ elif len(names) > 1:
145
+ raise Exception("[ERROR] Multiple packet sets, must set --packet-set-name")
146
+ packet_set_name = names[0]
147
+ self._packet_dict = packet_loader.get_id_dict(
148
+ None, packet_set_name, self._channel_name_dict
149
+ )
129
150
  else:
130
151
  self._packet_dict = None
131
152
 
@@ -158,6 +179,11 @@ class Dictionaries:
158
179
  def channel_name(self):
159
180
  """Channel dictionary by name"""
160
181
  return self._channel_name_dict
182
+
183
+ @property
184
+ def fw_type_name(self):
185
+ """Fw type name dictionary by name"""
186
+ return self._fw_type_name_dict
161
187
 
162
188
  @property
163
189
  def project_version(self):
@@ -6,6 +6,7 @@ and decoding into a single component that can be composed into the standard pipe
6
6
 
7
7
  @mstarch
8
8
  """
9
+
9
10
  import fprime_gds.common.decoders.ch_decoder
10
11
  import fprime_gds.common.decoders.event_decoder
11
12
  import fprime_gds.common.decoders.file_decoder
@@ -130,6 +131,24 @@ class EncodingDecoding:
130
131
  """
131
132
  return self.channel_decoder.deregister(consumer)
132
133
 
134
+ def register_file_consumer(self, consumer):
135
+ """
136
+ Registers a consumer with the file decoder.
137
+
138
+ :param consumer: consumer of file packets
139
+ """
140
+ self.file_decoder.register(consumer)
141
+
142
+ def remove_file_consumer(self, consumer):
143
+ """
144
+ Removes a consumer from the file decoder. Will raise an error if the consumer was not
145
+ previously registered.
146
+
147
+ :param consumer: consumer of file packets
148
+ :return: a boolean indicating if the consumer was removed.
149
+ """
150
+ return self.file_decoder.deregister(consumer)
151
+
133
152
  def register_command_consumer(self, consumer):
134
153
  """
135
154
  Registers a history with the standard pipeline.
@@ -6,6 +6,7 @@ to compose in this code.
6
6
 
7
7
  @author mstarch
8
8
  """
9
+
9
10
  from typing import Type
10
11
 
11
12
  from fprime_gds.common.history.history import History
@@ -37,6 +38,9 @@ class Histories:
37
38
  :param coders: coders object to register histories with
38
39
  """
39
40
  self.coders = coders
41
+ # Allow implementation type to disable histories
42
+ if self._implementation_type is None:
43
+ return
40
44
  # Create histories, RAM histories for now
41
45
  self.commands = self._implementation_type()
42
46
  self.events = self._implementation_type()
@@ -8,6 +8,7 @@ below.
8
8
 
9
9
  :author: lestarch
10
10
  """
11
+
11
12
  import datetime
12
13
  import os.path
13
14
  from pathlib import Path
@@ -19,6 +20,7 @@ import fprime_gds.common.data_types.cmd_data
19
20
  import fprime_gds.common.distributor.distributor
20
21
  import fprime_gds.common.logger.data_logger
21
22
  from fprime_gds.common.transport import RoutingTag, ThreadedTCPSocketClient
23
+ from fprime_gds.common.utils.config_manager import ConfigManager
22
24
 
23
25
  # Local imports for the sake of composition
24
26
  from . import dictionaries, encoding, files, histories
@@ -54,7 +56,13 @@ class StandardPipeline:
54
56
  self.__transport_type = ThreadedTCPSocketClient
55
57
 
56
58
  def setup(
57
- self, config, dictionary, file_store, logging_prefix=None, packet_spec=None
59
+ self,
60
+ config: ConfigManager,
61
+ dictionary,
62
+ file_store,
63
+ logging_prefix=None,
64
+ packet_spec=None,
65
+ packet_set_name=None,
58
66
  ):
59
67
  """
60
68
  Setup the standard pipeline for moving data from the middleware layer through the GDS layers using the standard
@@ -84,7 +92,13 @@ class StandardPipeline:
84
92
  self.distributor = fprime_gds.common.distributor.distributor.Distributor(config)
85
93
  self.client_socket = self.__transport_type()
86
94
  # Setup dictionaries encoders and decoders
87
- self.dictionaries.load_dictionaries(self.dictionary_path, packet_spec)
95
+ self.dictionaries.load_dictionaries(
96
+ self.dictionary_path, packet_spec, packet_set_name
97
+ )
98
+ # Update config to use Fw types defined in the JSON dictionary
99
+ if self.dictionaries.fw_type_name:
100
+ for fw_type_name, fw_type in self.dictionaries.fw_type_name.items():
101
+ config.set("types", fw_type_name, fw_type)
88
102
  self.coders.setup_coders(
89
103
  self.dictionaries, self.distributor, self.client_socket, config
90
104
  )
@@ -175,3 +175,11 @@ class CmdTemplate(data_template.DataTemplate):
175
175
 
176
176
  def getArgs(self):
177
177
  return self.get_args()
178
+
179
+ def __repr__(self):
180
+ arg_strs = []
181
+ for arg in self.arguments:
182
+ arg_strs.append(arg[0] + ": " + str(arg[2]))
183
+
184
+ args_str = ", ".join(arg_strs)
185
+ return f"CmdTemplate({self.comp_name}.{self.mnemonic}, args: ({args_str}))"
@@ -0,0 +1,81 @@
1
+ """
2
+ @brief Params Template class
3
+
4
+ Instances of this class describe a parameter of a component instance (not
5
+ including a specific value)
6
+
7
+ @date Created January 27, 2025
8
+ @author Zimri Leisher
9
+
10
+ @bug Hopefully none
11
+ """
12
+
13
+ from fprime.common.models.serialize.type_base import BaseType
14
+ from fprime.common.models.serialize.type_exceptions import TypeMismatchException
15
+
16
+ from . import data_template
17
+
18
+
19
+ class PrmTemplate(data_template.DataTemplate):
20
+ """Class for param templates that describe parameters of component instances"""
21
+
22
+ def __init__(
23
+ self,
24
+ prm_id: int,
25
+ prm_name: str,
26
+ comp_name: str,
27
+ prm_type_obj: BaseType,
28
+ prm_default_val,
29
+ ):
30
+ """
31
+ Constructor
32
+
33
+ Args:
34
+ prm_id: the id of the parameter
35
+ prm_name: the name of the parameter
36
+ comp_name: the name of the component instance owning this parameter
37
+ prm_type_obj: the instance of BaseType corresponding to the type of this parameter
38
+ prm_default_val: the default value of this parameter, in raw JSON form
39
+ """
40
+ super().__init__()
41
+ # Make sure correct types are passed
42
+ if not isinstance(prm_id, int):
43
+ raise TypeMismatchException(int, type(prm_id))
44
+
45
+ if not isinstance(prm_name, str):
46
+ raise TypeMismatchException(str, type(prm_name))
47
+
48
+ if not isinstance(comp_name, str):
49
+ raise TypeMismatchException(str, type(comp_name))
50
+
51
+ if not issubclass(prm_type_obj, BaseType):
52
+ raise TypeMismatchException(BaseType, prm_type_obj)
53
+
54
+ # prm_default_val is an arbitrary type, likely a primitive or dict
55
+
56
+ self.prm_id = prm_id
57
+ self.prm_name = prm_name
58
+ self.comp_name = comp_name
59
+ self.prm_type_obj = prm_type_obj
60
+ self.prm_default_val = prm_default_val
61
+
62
+ def get_full_name(self):
63
+ """
64
+ Get the full name of this param
65
+
66
+ Returns:
67
+ The full name (component.param) for this param
68
+ """
69
+ return f"{self.comp_name}.{self.prm_name}"
70
+
71
+ def get_id(self):
72
+ return self.prm_id
73
+
74
+ def get_name(self):
75
+ return self.prm_name
76
+
77
+ def get_comp_name(self):
78
+ return self.comp_name
79
+
80
+ def get_type_obj(self):
81
+ return self.prm_type_obj
@@ -9,6 +9,9 @@ telemetry and dictionaries.
9
9
  """
10
10
  import signal
11
11
  import time
12
+ from pathlib import Path
13
+ import shutil
14
+ import json
12
15
 
13
16
  from fprime.common.models.serialize.time_type import TimeType
14
17
 
@@ -28,15 +31,17 @@ class IntegrationTestAPI(DataHandler):
28
31
 
29
32
  NOW = "NOW"
30
33
 
31
- def __init__(self, pipeline, logpath=None, fsw_order=True):
34
+ def __init__(self, pipeline, deployment_config=None, logpath=None, fsw_order=True):
32
35
  """
33
36
  Initializes API: constructs and registers test histories.
34
37
  Args:
35
38
  pipeline: a pipeline object providing access to basic GDS functionality
39
+ deployment_config: path to deployment configuration file
36
40
  logpath: an optional output destination for the api test log
37
41
  fsw_order: a flag to determine whether the API histories will maintain FSW time order.
38
42
  """
39
43
  self.pipeline = pipeline
44
+ self.deployment_config = deployment_config
40
45
 
41
46
  # these are owned by the GDS and will not be modified by the test API.
42
47
  self.aggregate_command_history = pipeline.histories.commands
@@ -63,6 +68,15 @@ class IntegrationTestAPI(DataHandler):
63
68
  # Initialize the logger
64
69
  self.logger = TestLogger(logpath) if logpath is not None else None
65
70
 
71
+ # Copy dictionaries and binary file to output directory
72
+ if logpath is not None:
73
+ base_dir = Path(self.pipeline.dictionary_path).parents[1]
74
+ for subdir in ['bin', 'dict']:
75
+ dir_path = base_dir / subdir
76
+ if dir_path.is_dir():
77
+ shutil.copytree(dir_path, Path(logpath) / subdir,
78
+ dirs_exist_ok=True)
79
+
66
80
  # A predicate used as a filter to choose which events to log automatically
67
81
  self.event_log_filter = self.get_event_pred()
68
82
 
@@ -215,6 +229,139 @@ class IntegrationTestAPI(DataHandler):
215
229
  """
216
230
  self.event_log_filter = self.get_event_pred(event, args, severity, time_pred)
217
231
 
232
+ def get_deployment(self):
233
+ """
234
+ Get the deployment of the target using the loaded FSW dictionary.
235
+
236
+ Returns:
237
+ The name of the deployment (str) or None if not found
238
+ """
239
+ dictionary = str(self.pipeline.dictionary_path)
240
+
241
+ try:
242
+ with open(dictionary, 'r') as file:
243
+ data = json.load(file)
244
+ return data['metadata'].get("deploymentName")
245
+ except FileNotFoundError:
246
+ msg = f"Error: File not found at path: {dictionary}"
247
+ self.__log(msg, TestLogger.YELLOW)
248
+ return None
249
+ except json.JSONDecodeError as e:
250
+ msg = f"Error decoding JSON: {e}"
251
+ self.__log(msg, TestLogger.YELLOW)
252
+ return None
253
+ except Exception as e:
254
+ msg = f"An unexpected error occurred: {e} is an unknown key"
255
+ self.__log(msg, TestLogger.YELLOW)
256
+ return None
257
+
258
+ def wait_for_dataflow(self, count=1, channels=None, start=None, timeout=120):
259
+ """
260
+ Wait for data flow by checking for any telemetry updates within a specified timeout.
261
+
262
+ Args:
263
+ count: either an exact amount (int) or a predicate to specify how many objects to find
264
+ channels: a channel specifier or list of channel specifiers (mnemonic, ID, or predicate). All will count if None
265
+ start: an optional index or predicate to specify the earliest item to search
266
+ timeout: the number of seconds to wait before terminating the search (int)
267
+ """
268
+ if start is None:
269
+ start = self.get_latest_time()
270
+
271
+ history = self.get_telemetry_subhistory()
272
+ result = self.await_telemetry_count(
273
+ count, channels=channels, history=history, start=start, timeout=timeout
274
+ )
275
+ if not result:
276
+ msg = f'Failed to detect any data flow for {timeout} s.'
277
+ self.__log(msg, TestLogger.RED)
278
+ assert False, msg
279
+ self.remove_telemetry_subhistory(history)
280
+
281
+ def get_config_file_path(self):
282
+ """
283
+ Accessor for IntegrationTestAPI's deployment configuration file.
284
+
285
+ Returns:
286
+ path to user-specified deployment configuration file (str) or None if not defined
287
+ """
288
+ if self.deployment_config:
289
+ return self.deployment_config
290
+ else:
291
+ return None
292
+
293
+ def load_config_file(self):
294
+ """
295
+ Load user-specified deployment configuration JSON file.
296
+
297
+ Returns:
298
+ JSON object as a dictionary
299
+ """
300
+ config_file = self.get_config_file_path()
301
+
302
+ try:
303
+ with open(config_file, 'r') as file:
304
+ result = json.load(file)
305
+ return result
306
+ except FileNotFoundError:
307
+ msg = f"Error: File not found at path {config_file}"
308
+ self.__log(msg, TestLogger.RED)
309
+ assert False, msg
310
+ except json.JSONDecodeError as e:
311
+ msg = f"Error decoding JSON: {e}"
312
+ self.__log(msg, TestLogger.RED)
313
+ assert False, msg
314
+ except Exception as e:
315
+ msg = f"An unexpected error occurred: {e}"
316
+ self.__log(msg, TestLogger.RED)
317
+ assert False, msg
318
+
319
+ def get_mnemonic(self, comp=None, name=None):
320
+ """
321
+ Get deployment mnemonic of specified item from user-specified deployment
322
+ configuration file.
323
+
324
+ Args:
325
+ comp: qualified name of the component instance (str), i.e. "<component>.<instance>"
326
+ name: command, channel, or event name (str) [optional]
327
+ Returns:
328
+ deployment mnemonic of specified item (str) or native mnemonic (str) if not found
329
+ """
330
+ data = self.load_config_file()
331
+
332
+ if data:
333
+ try:
334
+ mnemonic = data[comp]
335
+ return f"{mnemonic}.{name}" if name else f"{mnemonic}"
336
+ except KeyError:
337
+ self.__log(f"Error: {comp} not found", TestLogger.YELLOW)
338
+ return f"{comp}.{name}" if name else f"{comp}"
339
+ else:
340
+ return f"{comp}.{name}" if name else f"{comp}"
341
+
342
+ def get_prm_db_path(self) -> str:
343
+ """
344
+ Get file path to parameter db from user-specified deployment configuration file.
345
+
346
+ Returns:
347
+ file path to parameter db (str) or None if not found
348
+ """
349
+ data = self.load_config_file()
350
+
351
+ if data:
352
+ try:
353
+ filepath = data["Svc.PrmDb.filename"]
354
+ if filepath.startswith('/'):
355
+ return filepath
356
+ else:
357
+ msg = f"Error: {filepath} did not start with a forward slash"
358
+ self.__log(msg, TestLogger.RED)
359
+ assert False, msg
360
+ except KeyError:
361
+ return None
362
+ else:
363
+ return None
364
+
218
365
  ######################################################################################
219
366
  # History Functions
220
367
  ######################################################################################
@@ -15,7 +15,7 @@ Here a test (defined by starting the name with test_) uses the fprime_test_api f
15
15
  @author lestarch
16
16
  """
17
17
  import sys
18
-
18
+ from pathlib import Path
19
19
  import pytest
20
20
 
21
21
  from fprime_gds.common.testing_fw.api import IntegrationTestAPI
@@ -38,7 +38,36 @@ def pytest_addoption(parser):
38
38
  # Reduce flags to only the long option (i.e. --something) form
39
39
  flags = [flag for flag in flags if flag.startswith("--")]
40
40
  parser.addoption(*flags, **specifiers)
41
-
41
+
42
+ # Add an option to specify JUnit XML report file
43
+ parser.addoption(
44
+ "--junit-xml-file",
45
+ action="store",
46
+ default="report.xml",
47
+ help="File to store JUnit XML report. [default: %(default)s]",
48
+ )
49
+ # Add an option to enable JUnit XML report generation to a specified location
50
+ parser.addoption(
51
+ "--gen-junitxml",
52
+ action="store_true",
53
+ help="Enable JUnitXML report generation to a specified location"
54
+ )
55
+ # Add an option to specify a json config file that maps components
56
+ parser.addoption(
57
+ "--deployment-config",
58
+ action="store",
59
+ help="Path to JSON configuration file for mapping deployment components"
60
+ )
61
+
62
+ def pytest_configure(config):
63
+ """ This is a hook to allow plugins and conftest files to perform initial configuration
64
+
65
+ This hook is called for every initial conftest file after command line options have been parsed. After that, the
66
+ hook is called for other conftest files as they are registered.
67
+ """
68
+ # Create a JUnit XML report file to capture the test result in a specified location
69
+ if config.getoption("--gen-junitxml"):
70
+ config.option.xmlpath = Path(config.getoption("--logs")) / config.getoption("--junit-xml-file")
42
71
 
43
72
  @pytest.fixture(scope='session')
44
73
  def fprime_test_api_session(request):
@@ -62,6 +91,7 @@ def fprime_test_api_session(request):
62
91
  pipeline_parser = StandardPipelineParser()
63
92
  pipeline = None
64
93
  api = None
94
+ deployment_config = None
65
95
  try:
66
96
  # Parse the command line arguments into a client connection
67
97
  arg_ns = pipeline_parser.handle_arguments(request.config.known_args_namespace, client=True)
@@ -69,8 +99,12 @@ def fprime_test_api_session(request):
69
99
  # Build a new pipeline with the parsed and processed arguments
70
100
  pipeline = pipeline_parser.pipeline_factory(arg_ns, pipeline)
71
101
 
102
+ # Get deployment configuration from command line arguments
103
+ if request.config.option.deployment_config:
104
+ deployment_config = request.config.option.deployment_config
105
+
72
106
  # Build and set up the integration test api
73
- api = IntegrationTestAPI(pipeline, arg_ns.logs)
107
+ api = IntegrationTestAPI(pipeline, deployment_config, arg_ns.logs)
74
108
  api.setup()
75
109
 
76
110
  # Return the API. Note: the second call here-in will begin after the yield and clean-up after the test
@@ -0,0 +1,34 @@
1
+ # FPrime GDS tools
2
+ ## fprime-prm-write
3
+ ### JSON file reference
4
+ JSON files for the `fprime-prm-write` tool should take the following form:
5
+ ```json
6
+ {
7
+ "componentInstanceOne": {
8
+ "parameterNameOne": "parameter value",
9
+ "parameterNameTwo": ["a", "b", "c"],
10
+ "parameterNameThree": {
11
+ "complexValue": [123, 456]
12
+ }
13
+ },
14
+ "componentInstanceTwo": {
15
+ "parameterNameFour": true
16
+ }
17
+ }
18
+ ```
19
+ The JSON should consist of a key-value map of component instance names to an inner key-value map. The inner key-value map should consist of parameter name-to-value map entries. The parameter values support complex FPrime types, such as nested structs, arrays or enum constants. Structs are instantiated with key-value maps, where the keys are the field names and the values are the field values. Arrays are just JSON arrays, and enum constants are represented as strings.
20
+
21
+ ### How to Initialize a ParamDB .dat File
22
+
23
+ The `fprime-prm-write` tool can be used to create, from a JSON file, a `.dat` file compatible with the `PrmDb` component. To use, create a compatible JSON file as defined in the JSON File Reference above, and pass it to the tool using the `dat` subcommand, like so:
24
+ ```
25
+ fprime-prm-write dat <json file> --dictionary <path to compiled FPrime dict>
26
+ ```
27
+ You should then have a `.dat` file which can be passed in to the `PrmDb`. Note, this `.dat` file will only have entries for the parameters specified in the JSON file. If you want it to instead have a value for all parameters which have a default value, you can add the `--defaults` option. Then, the generated `.dat` file will essentially reset all parameters back to default, except for those specified in the JSON file.
28
+
29
+ ### How to Create a .seq File From a Parameter JSON File
30
+ Sometimes, you may want to update parameters while the FPrime application is running. This can be accomplished with a sequence of `_PRM_SET` commands, which the `fprime-prm-write` tool can automatically create for you. To use, create a compatible JSON file as defined in the JSON File Reference above, and pass it to the tool using the `seq` subcommand, like so:
31
+ ```
32
+ fprime-prm-write seq <json file> --dictionary <path to compiled FPrime dict>
33
+ ```
34
+ You should then have a `.seq` file which can be compiled and executed by the `CmdSequencer`.