fprime-gds 3.4.3__py3-none-any.whl → 3.4.4a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. fprime_gds/common/communication/adapters/base.py +30 -58
  2. fprime_gds/common/communication/adapters/ip.py +23 -5
  3. fprime_gds/common/communication/adapters/uart.py +20 -7
  4. fprime_gds/common/communication/checksum.py +1 -3
  5. fprime_gds/common/communication/framing.py +53 -4
  6. fprime_gds/common/data_types/event_data.py +6 -1
  7. fprime_gds/common/data_types/exceptions.py +16 -11
  8. fprime_gds/common/loaders/ch_json_loader.py +107 -0
  9. fprime_gds/common/loaders/ch_xml_loader.py +5 -5
  10. fprime_gds/common/loaders/cmd_json_loader.py +85 -0
  11. fprime_gds/common/loaders/dict_loader.py +1 -1
  12. fprime_gds/common/loaders/event_json_loader.py +108 -0
  13. fprime_gds/common/loaders/event_xml_loader.py +10 -6
  14. fprime_gds/common/loaders/json_loader.py +222 -0
  15. fprime_gds/common/loaders/xml_loader.py +31 -9
  16. fprime_gds/common/pipeline/dictionaries.py +38 -3
  17. fprime_gds/common/tools/seqgen.py +4 -4
  18. fprime_gds/common/utils/string_util.py +57 -65
  19. fprime_gds/common/zmq_transport.py +37 -20
  20. fprime_gds/executables/apps.py +150 -0
  21. fprime_gds/executables/cli.py +239 -103
  22. fprime_gds/executables/comm.py +17 -27
  23. fprime_gds/executables/data_product_writer.py +935 -0
  24. fprime_gds/executables/run_deployment.py +55 -14
  25. fprime_gds/executables/utils.py +24 -12
  26. fprime_gds/flask/sequence.py +1 -1
  27. fprime_gds/flask/static/addons/commanding/command-input.js +3 -2
  28. fprime_gds/plugin/__init__.py +0 -0
  29. fprime_gds/plugin/definitions.py +71 -0
  30. fprime_gds/plugin/system.py +225 -0
  31. {fprime_gds-3.4.3.dist-info → fprime_gds-3.4.4a2.dist-info}/METADATA +3 -2
  32. {fprime_gds-3.4.3.dist-info → fprime_gds-3.4.4a2.dist-info}/RECORD +37 -28
  33. {fprime_gds-3.4.3.dist-info → fprime_gds-3.4.4a2.dist-info}/WHEEL +1 -1
  34. {fprime_gds-3.4.3.dist-info → fprime_gds-3.4.4a2.dist-info}/entry_points.txt +2 -3
  35. {fprime_gds-3.4.3.dist-info → fprime_gds-3.4.4a2.dist-info}/LICENSE.txt +0 -0
  36. {fprime_gds-3.4.3.dist-info → fprime_gds-3.4.4a2.dist-info}/NOTICE.txt +0 -0
  37. {fprime_gds-3.4.3.dist-info → fprime_gds-3.4.4a2.dist-info}/top_level.txt +0 -0

fprime_gds/common/loaders/event_json_loader.py
@@ -0,0 +1,108 @@
+"""
+event_json_loader.py:
+
+Loads flight dictionary (JSON) and returns id and mnemonic based Python dictionaries of events
+
+@author thomas-bc
+"""
+
+from fprime_gds.common.templates.event_template import EventTemplate
+from fprime_gds.common.utils.event_severity import EventSeverity
+from fprime_gds.common.loaders.json_loader import JsonLoader
+from fprime_gds.common.data_types.exceptions import GdsDictionaryParsingException
+
+
+class EventJsonLoader(JsonLoader):
+    """Class to load json based event dictionaries"""
+
+    EVENTS_FIELD = "events"
+
+    NAME = "name"
+    ID = "id"
+    SEVERITY = "severity"
+    FMT_STR = "format"
+    DESC = "annotation"
+    PARAMETERS = "formalParams"
+
+    def construct_dicts(self, _):
+        """
+        Constructs and returns python dictionaries keyed on id and name
+
+        This function should not be called directly, instead, use
+        get_id_dict(path) and get_name_dict(path)
+
+        Args:
+            _: Unused argument (inherited)
+
+        Returns:
+            A tuple with two event dictionaries (python type dict):
+            (id_dict, name_dict). The keys are the events' id and name fields
+            respectively and the values are ChTemplate objects
+        """
+        id_dict = {}
+        name_dict = {}
+
+        if self.EVENTS_FIELD not in self.json_dict:
+            raise GdsDictionaryParsingException(
+                f"Ground Dictionary missing '{self.EVENTS_FIELD}' field: {str(self.json_file)}"
+            )
+
+        for event_dict in self.json_dict[self.EVENTS_FIELD]:
+            event_temp = self.construct_template_from_dict(event_dict)
+
+            id_dict[event_temp.get_id()] = event_temp
+            name_dict[event_temp.get_full_name()] = event_temp
+
+        return (
+            dict(sorted(id_dict.items())),
+            dict(sorted(name_dict.items())),
+            self.get_versions(),
+        )
+
+    def construct_template_from_dict(self, event_dict: dict):
+        try:
+            event_comp, event_name = event_dict[self.NAME].split(".")
+            event_id = event_dict[self.ID]
+            event_severity = EventSeverity[event_dict[self.SEVERITY]]
+        except ValueError as e:
+            raise GdsDictionaryParsingException(
+                f"Event dictionary entry malformed, expected name of the form '<COMP_NAME>.<EVENT_NAME>' in : {str(event_dict)}"
+            )
+        except KeyError as e:
+            raise GdsDictionaryParsingException(
+                f"{str(e)} key missing from Event dictionary entry: {str(event_dict)}"
+            )
+
+        event_fmt_str = JsonLoader.preprocess_format_str(
+            event_dict.get(self.FMT_STR, "")
+        )
+
+        event_desc = event_dict.get(self.DESC)
+
+        # Parse arguments
+        event_args = []
+        for arg in event_dict.get(self.PARAMETERS, []):
+            try:
+                arg_name = arg["name"]
+                arg_type = self.parse_type(arg["type"])
+            except KeyError as e:
+                raise GdsDictionaryParsingException(
+                    f"{str(e)} key missing from Event parameter or its associated type in the dictionary: {str(arg)}"
+                )
+            event_args.append(
+                (
+                    arg_name,
+                    arg.get("annotation"),
+                    arg_type,
+                )
+            )
+
+        return EventTemplate(
+            event_id,
+            event_name,
+            event_comp,
+            event_args,
+            event_severity,
+            event_fmt_str,
+            event_desc,
+        )
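
Usage note (outside the diff): a minimal sketch of driving the new EventJsonLoader above; the dictionary filename is a placeholder, not part of this release.

# Sketch only: exercises the EventJsonLoader added above.
# "MyDeploymentTopologyDictionary.json" is a placeholder path.
from fprime_gds.common.loaders.event_json_loader import EventJsonLoader

loader = EventJsonLoader("MyDeploymentTopologyDictionary.json")
name_dict = loader.get_name_dict(None)  # keyed on "<COMP_NAME>.<EVENT_NAME>" full names
id_dict = loader.get_id_dict(None)      # keyed on numeric event ids
framework_version, project_version = loader.get_versions()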

fprime_gds/common/loaders/event_xml_loader.py
@@ -43,15 +43,15 @@ class EventXmlLoader(XmlLoader):
         respectively and the values are ChTemplate objects
         """
         xml_tree = self.get_xml_tree(path)
-        versions = xml_tree.attrib.get("framework_version", "unknown"), xml_tree.attrib.get("project_version", "unknown")
+        versions = xml_tree.attrib.get(
+            "framework_version", "unknown"
+        ), xml_tree.attrib.get("project_version", "unknown")

         # Check if xml dict has events section
         event_section = self.get_xml_section(self.EVENT_SECT, xml_tree)
         if event_section is None:
             msg = f"Xml dict did not have a {self.EVENT_SECT} section"
-            raise exceptions.GseControllerParsingException(
-                msg
-            )
+            raise exceptions.GseControllerParsingException(msg)

         id_dict = {}
         name_dict = {}
@@ -63,14 +63,18 @@ class EventXmlLoader(XmlLoader):
             event_name = event_dict[self.NAME_TAG]
             event_id = int(event_dict[self.ID_TAG], base=16)
             event_severity = EventSeverity[event_dict[self.SEVERITY_TAG]]
-            event_fmt_str = event_dict[self.FMT_STR_TAG]
+            event_fmt_str = XmlLoader.preprocess_format_str(
+                event_dict[self.FMT_STR_TAG]
+            )

             event_desc = None
             if self.DESC_TAG in event_dict:
                 event_desc = event_dict[self.DESC_TAG]

             # Parse arguments
-            args = self.get_args_list(event, xml_tree, f"{ event_comp }::{ event_name }")
+            args = self.get_args_list(
+                event, xml_tree, f"{ event_comp }::{ event_name }"
+            )

             event_temp = EventTemplate(
                 event_id,

fprime_gds/common/loaders/json_loader.py
@@ -0,0 +1,222 @@
+"""
+json_loader.py:
+
+Base class for all loaders that load dictionaries from json dictionaries
+
+@author thomas-bc
+"""
+
+import json
+from typing import Optional
+
+from fprime.common.models.serialize.array_type import ArrayType
+from fprime.common.models.serialize.bool_type import BoolType
+from fprime.common.models.serialize.enum_type import EnumType
+import fprime.common.models.serialize.numerical_types as numerical_types
+from fprime.common.models.serialize.serializable_type import SerializableType
+from fprime.common.models.serialize.string_type import StringType
+from fprime.common.models.serialize.type_base import BaseType
+
+from fprime_gds.common.utils.string_util import preprocess_fpp_format_str
+from fprime_gds.common.loaders import dict_loader
+from fprime_gds.common.data_types.exceptions import GdsDictionaryParsingException
+
+
+PRIMITIVE_TYPE_MAP = {
+    "I8": numerical_types.I8Type,
+    "I16": numerical_types.I16Type,
+    "I32": numerical_types.I32Type,
+    "I64": numerical_types.I64Type,
+    "U8": numerical_types.U8Type,
+    "U16": numerical_types.U16Type,
+    "U32": numerical_types.U32Type,
+    "U64": numerical_types.U64Type,
+    "F32": numerical_types.F32Type,
+    "F64": numerical_types.F64Type,
+    "bool": BoolType,
+}
+
+
+class JsonLoader(dict_loader.DictLoader):
+    """Class to help load JSON dictionaries"""
+
+    # Cache parsed type objects at the class level so they can be reused across subclasses
+    parsed_types: dict = {}
+
+    def __init__(self, json_file: str):
+        """
+        Constructor
+
+        Returns:
+            An initialized loader object
+        """
+        super().__init__()
+        self.json_file = json_file
+        with open(json_file, "r") as f:
+            self.json_dict = json.load(f)
+
+    def get_versions(self):
+        """
+        Get the framework and project versions of the dictionary
+
+        Returns:
+            A tuple of the framework and project versions
+        """
+        if "metadata" not in self.json_dict:
+            raise GdsDictionaryParsingException(
+                f"Dictionary has no metadata field: {self.json_file}"
+            )
+        return (
+            self.json_dict["metadata"].get("frameworkVersion", "unknown"),
+            self.json_dict["metadata"].get("projectVersion", "unknown"),
+        )
+
+    def parse_type(self, type_dict: dict) -> BaseType:
+        type_name: str = type_dict.get("name", None)
+
+        if type_name is None:
+            raise GdsDictionaryParsingException(
+                f"Dictionary entry has no `name` field: {str(type_dict)}"
+            )
+
+        if type_name in PRIMITIVE_TYPE_MAP:
+            return PRIMITIVE_TYPE_MAP[type_name]
+
+        if type_name == "string":
+            return StringType.construct_type(
+                f'String_{type_dict["size"]}', type_dict["size"]
+            )
+
+        # Check if type has already been parsed
+        if type_name in self.parsed_types:
+            return self.parsed_types[type_name]
+
+        # Parse new enum/array/serializable types
+        qualified_type = None
+        for type_def in self.json_dict.get("typeDefinitions", []):
+            if type_name == type_def.get("qualifiedName"):
+                qualified_type = type_def
+                break
+        else:
+            raise GdsDictionaryParsingException(
+                f"Dictionary type name has no corresponding type definition: {type_name}"
+            )

+        if qualified_type.get("kind") == "array":
+            return self.construct_array_type(type_name, qualified_type)
+
+        if qualified_type.get("kind") == "enum":
+            return self.construct_enum_type(type_name, qualified_type)
+
+        if qualified_type.get("kind") == "struct":
+            return self.construct_serializable_type(type_name, qualified_type)
+
+        raise GdsDictionaryParsingException(
+            f"Dictionary entry has unknown type {str(type_dict)}"
+        )
+
+    def construct_enum_type(self, type_name: str, qualified_type: dict) -> EnumType:
+        """
+        Constructs an EnumType object of the given type name and qualified type dictionary.
+        Caches the constructed EnumType object in the parsed_types dictionary.
+
+        Args:
+            type_name (str): The name of the enum type.
+            qualified_type (dict): A dictionary containing the qualified type information.
+
+        Returns:
+            EnumType: The constructed EnumType object.
+
+        """
+        enum_dict = {}
+        for member in qualified_type.get("enumeratedConstants"):
+            enum_dict[member["name"]] = member.get("value")
+        enum_type = EnumType.construct_type(
+            type_name,
+            enum_dict,
+            qualified_type["representationType"].get("name"),
+        )
+        self.parsed_types[type_name] = enum_type
+        return enum_type
+
+    def construct_array_type(self, type_name: str, qualified_type: dict) -> ArrayType:
+        """
+        Constructs an ArrayType object based on the given type name and qualified type dictionary.
+        Caches the constructed ArrayType object in the parsed_types dictionary.
+
+        Args:
+            type_name (str): The name of the array type.
+            qualified_type (dict): The qualified type dictionary containing information about the array type.
+
+        Returns:
+            ArrayType: The constructed ArrayType object.
+
+        """
+        array_type = ArrayType.construct_type(
+            type_name,
+            self.parse_type(qualified_type.get("elementType")),
+            qualified_type.get("size"),
+            JsonLoader.preprocess_format_str(
+                qualified_type["elementType"].get("format", "{}")
+            ),
+        )
+        self.parsed_types[type_name] = array_type
+        return array_type
+
+    def construct_serializable_type(
+        self, type_name: str, qualified_type: dict
+    ) -> SerializableType:
+        """
+        Constructs a SerializableType based on the given type name and qualified type dictionary.
+        Caches the constructed SerializableType object in the parsed_types dictionary.
+
+        Args:
+            type_name (str): The name of the serializable type.
+            qualified_type (dict): The qualified type dictionary containing information about the type.
+
+        Returns:
+            SerializableType: The constructed serializable type.
+
+        """
+        struct_members = []
+        for name, member_dict in qualified_type.get("members").items():
+            member_type_dict = member_dict["type"]
+            member_type_obj = self.parse_type(member_type_dict)
+
+            # For member arrays (declared inline, so we create a type on the fly)
+            if member_dict.get("size") is not None:
+                member_type_obj = ArrayType.construct_type(
+                    f"Array_{member_type_obj.__name__}_{member_dict['size']}",
+                    member_type_obj,
+                    member_dict["size"],
+                    JsonLoader.preprocess_format_str(
+                        member_dict["type"].get("format", "{}")
+                    ),
+                )
+            fmt_str = JsonLoader.preprocess_format_str(
+                member_type_obj.FORMAT if hasattr(member_type_obj, "FORMAT") else "{}"
+            )
+            description = member_type_dict.get("annotation", "")
+            struct_members.append((name, member_type_obj, fmt_str, description))
+
+        ser_type = SerializableType.construct_type(
+            type_name,
+            struct_members,
+        )
+        self.parsed_types[type_name] = ser_type
+        return ser_type
+
+    @staticmethod
+    def preprocess_format_str(format_str: Optional[str]) -> Optional[str]:
+        """Preprocess format strings before using them in Python format function
+        Internally, this converts FPP-style format strings to Python-style format strings
+
+        Args:
+            format_str (str): FPP-style format string
+
+        Returns:
+            str: Python-style format string
+        """
+        if format_str is None:
+            return None
+        return preprocess_fpp_format_str(format_str)
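
Usage note (outside the diff): a short sketch of how the new JsonLoader.parse_type resolves type descriptors, assuming a JSON dictionary on disk; the path below is illustrative.

# Sketch only: illustrates the type resolution added above.
from fprime_gds.common.loaders.json_loader import JsonLoader, PRIMITIVE_TYPE_MAP

loader = JsonLoader("MyDeploymentTopologyDictionary.json")    # placeholder path
u32_type = loader.parse_type({"name": "U32"})                 # resolved via PRIMITIVE_TYPE_MAP
str_type = loader.parse_type({"name": "string", "size": 40})  # bounded string type
# Any other name is looked up in the dictionary's "typeDefinitions" section and the
# constructed enum/array/struct type is cached in JsonLoader.parsed_types for reuse.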

fprime_gds/common/loaders/xml_loader.py
@@ -13,6 +13,7 @@ helper functions

 @bug No known bugs
 """
+
 import os

 from fprime.common.models.serialize.array_type import ArrayType
@@ -34,6 +35,7 @@ from fprime.common.models.serialize.serializable_type import SerializableType
 from fprime.common.models.serialize.string_type import StringType
 from lxml import etree

+from fprime_gds.common.utils.string_util import preprocess_c_style_format_str
 from fprime_gds.common.data_types import exceptions
 from fprime_gds.version import (
     MAXIMUM_SUPPORTED_FRAMEWORK_VERSION,
@@ -258,18 +260,21 @@ class XmlLoader(dict_loader.DictLoader):
         members = []
         for memb in memb_section:
             name = memb.get(self.SER_MEMB_NAME_TAG)
-            fmt_str = memb.get(self.SER_MEMB_FMT_STR_TAG)
+            fmt_str = XmlLoader.preprocess_format_str(
+                memb.get(self.SER_MEMB_FMT_STR_TAG)
+            )
             desc = memb.get(self.SER_MEMB_DESC_TAG)
             memb_type_name = memb.get(self.SER_MEMB_TYPE_TAG)
             memb_size = memb.get(self.SER_MEMB_SIZE_TAG)
             type_obj = self.parse_type(memb_type_name, memb, xml_obj)
             # memb_size is not None for member array
-            if(memb_size is not None):
+            if memb_size is not None:
                 type_obj = ArrayType.construct_type(
                     f"Array_{type_obj.__name__}_{memb_size}",
                     type_obj,
                     int(memb_size),
-                    fmt_str)
+                    fmt_str,
+                )

             members.append((name, type_obj, fmt_str, desc))

@@ -319,10 +324,14 @@ class XmlLoader(dict_loader.DictLoader):
         # Make config
         arr_type = arr_memb.get(self.ARR_TYPE_TAG)
         type_obj = self.parse_type(arr_type, arr_memb, xml_obj)
-        arr_format = arr_memb.get(self.ARR_FORMAT_TAG)
+        arr_format = XmlLoader.preprocess_format_str(
+            arr_memb.get(self.ARR_FORMAT_TAG)
+        )
         arr_size = arr_memb.get(self.ARR_SIZE_TAG)

-        arr_obj = ArrayType.construct_type(type_name, type_obj, int(arr_size), arr_format)
+        arr_obj = ArrayType.construct_type(
+            type_name, type_obj, int(arr_size), arr_format
+        )

         self.array_types[type_name] = arr_obj
         return arr_obj
@@ -372,7 +381,9 @@ class XmlLoader(dict_loader.DictLoader):
             return BoolType
         if type_name == "string":
             if self.STR_LEN_TAG not in xml_item.attrib:
-                print(f"Trying to parse string type, but found {self.STR_LEN_TAG} field")
+                print(
+                    f"Trying to parse string type, but found {self.STR_LEN_TAG} field"
+                )
                 return None
             max_length = int(xml_item.get(self.STR_LEN_TAG, 0))
             name = f"{context or ''}::{xml_item.get(self.ARG_NAME_TAG)}String"
@@ -394,6 +405,17 @@ class XmlLoader(dict_loader.DictLoader):

         # Abandon all hope
         msg = f"Could not find type {type_name}"
-        raise exceptions.GseControllerParsingException(
-            msg
-        )
+        raise exceptions.GseControllerParsingException(msg)
+
+    @staticmethod
+    def preprocess_format_str(format_str):
+        """Converts C-style format strings to Python-style format strings
+        For example "%x" -> "{:x}" or "%.2f" -> "{:.2f}"
+
+        Args:
+            format_str (str): C-style format string
+
+        Returns:
+            str: Python-style format string
+        """
+        return preprocess_c_style_format_str(format_str)
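
Usage note (outside the diff): the two preprocessors used by the XML and JSON loaders live in fprime_gds.common.utils.string_util, which is also reworked in this release. A sketch of the conversions, with expected outputs taken from the docstring above:

# Sketch only: expected behavior per the docstrings in this diff.
from fprime_gds.common.utils.string_util import (
    preprocess_c_style_format_str,
    preprocess_fpp_format_str,
)

print(preprocess_c_style_format_str("%x"))    # C-style, from XML dictionaries -> "{:x}"
print(preprocess_c_style_format_str("%.2f"))  # -> "{:.2f}"
fpp_fmt = preprocess_fpp_format_str("{}")     # FPP-style strings come from JSON dictionaries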

fprime_gds/common/pipeline/dictionaries.py
@@ -6,7 +6,9 @@ class called "Dictionaries".

 @author mstarch
 """
+
 import os
+from pathlib import Path

 import fprime_gds.common.loaders.ch_py_loader
 import fprime_gds.common.loaders.ch_xml_loader
@@ -19,6 +21,9 @@ import fprime_gds.common.loaders.cmd_xml_loader
 import fprime_gds.common.loaders.event_py_loader
 import fprime_gds.common.loaders.event_xml_loader
 import fprime_gds.common.loaders.pkt_xml_loader
+import fprime_gds.common.loaders.ch_json_loader
+import fprime_gds.common.loaders.cmd_json_loader
+import fprime_gds.common.loaders.event_json_loader


 class Dictionaries:
@@ -80,8 +85,34 @@ class Dictionaries:
             self._channel_name_dict = channel_loader.get_name_dict(
                 os.path.join(dictionary, "channels")
             )
+        elif Path(dictionary).is_file() and ".json" in Path(dictionary).suffixes:
+            # Events
+            json_event_loader = (
+                fprime_gds.common.loaders.event_json_loader.EventJsonLoader(dictionary)
+            )
+            self._event_name_dict = json_event_loader.get_name_dict(None)
+            self._event_id_dict = json_event_loader.get_id_dict(None)
+            self._versions = json_event_loader.get_versions()
+            # Commands
+            json_command_loader = (
+                fprime_gds.common.loaders.cmd_json_loader.CmdJsonLoader(dictionary)
+            )
+            self._command_name_dict = json_command_loader.get_name_dict(None)
+            self._command_id_dict = json_command_loader.get_id_dict(None)
+            assert (
+                self._versions == json_command_loader.get_versions()
+            ), "Version mismatch while loading"
+            # Channels
+            json_channel_loader = fprime_gds.common.loaders.ch_json_loader.ChJsonLoader(
+                dictionary
+            )
+            self._channel_name_dict = json_channel_loader.get_name_dict(None)
+            self._channel_id_dict = json_channel_loader.get_id_dict(None)
+            assert (
+                self._versions == json_channel_loader.get_versions()
+            ), "Version mismatch while loading"
         # XML dictionaries
-        elif os.path.isfile(dictionary):
+        elif Path(dictionary).is_file():
             # Events
             event_loader = fprime_gds.common.loaders.event_xml_loader.EventXmlLoader()
             self._event_id_dict = event_loader.get_id_dict(dictionary)
@@ -91,12 +122,16 @@ class Dictionaries:
             command_loader = fprime_gds.common.loaders.cmd_xml_loader.CmdXmlLoader()
             self._command_id_dict = command_loader.get_id_dict(dictionary)
             self._command_name_dict = command_loader.get_name_dict(dictionary)
-            assert self._versions == command_loader.get_versions(), "Version mismatch while loading"
+            assert (
+                self._versions == command_loader.get_versions()
+            ), "Version mismatch while loading"
             # Channels
             channel_loader = fprime_gds.common.loaders.ch_xml_loader.ChXmlLoader()
             self._channel_id_dict = channel_loader.get_id_dict(dictionary)
             self._channel_name_dict = channel_loader.get_name_dict(dictionary)
-            assert self._versions == channel_loader.get_versions(), "Version mismatch while loading"
+            assert (
+                self._versions == channel_loader.get_versions()
+            ), "Version mismatch while loading"
         else:
             msg = f"[ERROR] Dictionary '{dictionary}' does not exist."
             raise Exception(msg)
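
Usage note (outside the diff): the hunks above make Dictionaries dispatch on the dictionary path itself. A standalone sketch of that selection logic; the helper name is hypothetical, not an API in the package.

# Sketch only: mirrors the dispatch added above. select_dictionary_kind is a
# hypothetical helper, not part of fprime-gds.
from pathlib import Path

def select_dictionary_kind(dictionary: str) -> str:
    path = Path(dictionary)
    if path.is_file() and ".json" in path.suffixes:
        return "json"       # handled by EventJsonLoader / CmdJsonLoader / ChJsonLoader
    if path.is_file():
        return "xml"        # handled by the existing *XmlLoader classes
    return "directory"      # legacy python-module dictionaries (ch_py_loader, etc.)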

fprime_gds/common/tools/seqgen.py
@@ -22,7 +22,7 @@ from fprime.common.models.serialize.time_type import TimeBase, TimeType
 from fprime_gds.common.data_types import exceptions as gseExceptions
 from fprime_gds.common.data_types.cmd_data import CmdData, CommandArgumentsException
 from fprime_gds.common.encoders.seq_writer import SeqBinaryWriter
-from fprime_gds.common.loaders.cmd_xml_loader import CmdXmlLoader
+from fprime_gds.common.loaders.cmd_json_loader import CmdJsonLoader
 from fprime_gds.common.parsers.seq_file_parser import SeqFileParser

 __author__ = "Tim Canham"
@@ -56,9 +56,9 @@ def generateSequence(inputFile, outputFile, dictionary, timebase, cont=False):
         raise SeqGenException(msg)

     # Check the user environment:
-    cmd_xml_dict = CmdXmlLoader()
+    cmd_json_dict = CmdJsonLoader(dictionary)
     try:
-        (cmd_id_dict, cmd_name_dict, versions) = cmd_xml_dict.construct_dicts(
+        (cmd_id_dict, cmd_name_dict, versions) = cmd_json_dict.construct_dicts(
             dictionary
         )
     except gseExceptions.GseControllerUndefinedFileException:
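
Usage note (outside the diff): with this change, seqgen builds its command dictionaries from a JSON dictionary. A minimal sketch mirroring the replaced lines, assuming a JSON dictionary path of your own:

# Sketch only: mirrors the CmdJsonLoader flow shown in the hunk above.
from fprime_gds.common.loaders.cmd_json_loader import CmdJsonLoader

dictionary = "MyDeploymentTopologyDictionary.json"  # placeholder path
cmd_json_dict = CmdJsonLoader(dictionary)
(cmd_id_dict, cmd_name_dict, versions) = cmd_json_dict.construct_dicts(dictionary)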
@@ -153,7 +153,7 @@ def main():
         action="store",
         type=str,
         required=True,
-        help="Dictionary file name",
+        help="JSON Dictionary file name",
     )
     parser.add_argument(
         "-t",