fprime-gds 3.6.2a1__py3-none-any.whl → 4.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. fprime_gds/common/communication/adapters/ip.py +14 -9
  2. fprime_gds/common/communication/adapters/uart.py +34 -25
  3. fprime_gds/common/communication/ccsds/__init__.py +0 -0
  4. fprime_gds/common/communication/ccsds/apid.py +19 -0
  5. fprime_gds/common/communication/ccsds/chain.py +106 -0
  6. fprime_gds/common/communication/ccsds/space_data_link.py +196 -0
  7. fprime_gds/common/communication/ccsds/space_packet.py +129 -0
  8. fprime_gds/common/communication/framing.py +27 -32
  9. fprime_gds/common/decoders/ch_decoder.py +1 -1
  10. fprime_gds/common/decoders/event_decoder.py +9 -2
  11. fprime_gds/common/decoders/pkt_decoder.py +1 -1
  12. fprime_gds/common/distributor/distributor.py +6 -3
  13. fprime_gds/common/encoders/ch_encoder.py +2 -2
  14. fprime_gds/common/encoders/cmd_encoder.py +2 -2
  15. fprime_gds/common/encoders/event_encoder.py +2 -2
  16. fprime_gds/common/encoders/pkt_encoder.py +2 -2
  17. fprime_gds/common/encoders/seq_writer.py +2 -2
  18. fprime_gds/common/fpy/README.md +56 -0
  19. fprime_gds/common/fpy/SPEC.md +69 -0
  20. fprime_gds/common/fpy/__init__.py +0 -0
  21. fprime_gds/common/fpy/bytecode/__init__.py +0 -0
  22. fprime_gds/common/fpy/bytecode/directives.py +490 -0
  23. fprime_gds/common/fpy/codegen.py +1687 -0
  24. fprime_gds/common/fpy/grammar.lark +88 -0
  25. fprime_gds/common/fpy/main.py +40 -0
  26. fprime_gds/common/fpy/parser.py +239 -0
  27. fprime_gds/common/gds_cli/base_commands.py +1 -1
  28. fprime_gds/common/handlers.py +39 -0
  29. fprime_gds/common/loaders/fw_type_json_loader.py +54 -0
  30. fprime_gds/common/loaders/pkt_json_loader.py +125 -0
  31. fprime_gds/common/loaders/prm_json_loader.py +85 -0
  32. fprime_gds/common/logger/__init__.py +2 -2
  33. fprime_gds/common/pipeline/dictionaries.py +28 -2
  34. fprime_gds/common/pipeline/encoding.py +19 -0
  35. fprime_gds/common/pipeline/histories.py +4 -0
  36. fprime_gds/common/pipeline/standard.py +16 -2
  37. fprime_gds/common/templates/cmd_template.py +8 -0
  38. fprime_gds/common/templates/prm_template.py +81 -0
  39. fprime_gds/common/testing_fw/api.py +148 -1
  40. fprime_gds/common/testing_fw/pytest_integration.py +37 -3
  41. fprime_gds/common/tools/README.md +34 -0
  42. fprime_gds/common/tools/params.py +246 -0
  43. fprime_gds/common/utils/config_manager.py +6 -6
  44. fprime_gds/common/utils/data_desc_type.py +6 -1
  45. fprime_gds/executables/apps.py +189 -11
  46. fprime_gds/executables/cli.py +468 -127
  47. fprime_gds/executables/comm.py +5 -2
  48. fprime_gds/executables/data_product_writer.py +164 -165
  49. fprime_gds/executables/fprime_cli.py +3 -3
  50. fprime_gds/executables/run_deployment.py +13 -5
  51. fprime_gds/flask/static/js/vue-support/channel.js +1 -1
  52. fprime_gds/flask/static/js/vue-support/event.js +1 -1
  53. fprime_gds/plugin/definitions.py +86 -8
  54. fprime_gds/plugin/system.py +172 -58
  55. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/METADATA +23 -21
  56. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/RECORD +61 -41
  57. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/WHEEL +1 -1
  58. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/entry_points.txt +2 -0
  59. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info/licenses}/LICENSE.txt +0 -0
  60. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info/licenses}/NOTICE.txt +0 -0
  61. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,88 @@
1
# Adapted from: https://docs.python.org/3/reference/grammar.html and the Lark Python grammar

# Top level: a program is a sequence of statements and blank lines
input: (_NEWLINE | _stmt)*

_literal: number | string | boolean

# Statements (rules prefixed with "_" are inlined into their parent by Lark)
_stmt: _small_stmt [_NEWLINE] | _compound_stmt
_small_stmt: (_expr_stmt | assign | pass_stmt )
_compound_stmt: if_stmt
pass_stmt: "pass"

_expr_stmt: _expr


# assignment

# can currently only assign constant values to vars
# NOTE: the optional type slot spells out get_attr | get_item | var rather than
# reusing _reference; inlining _reference here kept maybe_placeholders from
# producing a None for the unused optional slot
assign: var [":" (get_attr | get_item | var)] "=" _expr



# branching

if_stmt: "if" _expr ":" body elifs ["else" ":" body]
elifs: elif_*
elif_: "elif" _expr ":" body
# _INDENT/_DEDENT tokens are emitted by the PythonIndenter postlexer
body: _NEWLINE _INDENT _stmt+ _DEDENT

_expr: _test

# logical tests
_test: or_test
?or_test: and_test ("or" and_test)*
?and_test: not_test_ ("and" not_test_)*
?not_test_: "not" not_test_ -> not_test
    | comparison
    | atom
comparison: atom comp_op atom
comp_op: COMPARISON_OP

arguments: _expr ("," _expr)*

?atom: _reference "(" [arguments] ")" -> func_call
    | _reference
    | _literal
    | "(" _expr ")"

get_item: _reference "[" number "]"
get_attr: _reference "." name
var: name

_reference: get_item | get_attr | var

# not used in grammar, but may appear in "node" passed from Parser to Compiler
encoding_decl: name

number: DEC_NUMBER | FLOAT_NUMBER
string: STRING
boolean: CONST_FALSE | CONST_TRUE

# Other terminals

_NEWLINE: ( /\r?\n[\t ]*/ | COMMENT )+

%ignore /[\t \f]+/ // WS
%ignore /\\[\t \f]*\r?\n/ // LINE_CONT
%ignore COMMENT
%declare _INDENT _DEDENT


# Python terminals

!name: NAME
NAME: /[^\W\d]\w*/
COMMENT: /#[^\n]*/
CONST_TRUE: "True"
CONST_FALSE: "False"
COMPARISON_OP: ">" | "<" | "<=" | ">=" | "==" | "!="

STRING: /("(?!"").*?(?<!\\)(\\\\)*?"|'(?!'').*?(?<!\\)(\\\\)*?')/i

_SPECIAL_DEC: "0".."9" ("_"? "0".."9" )*
# DEC_NUMBER forbids leading zeros except for a bare (possibly underscored) zero
DEC_NUMBER: "-"? "1".."9" ("_"? "0".."9" )*
    | "-"? "0" ("_"? "0" )* /(?![1-9])/

DECIMAL: "." _SPECIAL_DEC | _SPECIAL_DEC "." _SPECIAL_DEC?
FLOAT_NUMBER.2: "-"? _SPECIAL_DEC DECIMAL ["e" ["-"] _SPECIAL_DEC]
@@ -0,0 +1,40 @@
1
import argparse
import sys
from pathlib import Path

from fprime_gds.common.fpy.parser import parse
from fprime_gds.common.fpy.codegen import compile
from fprime_gds.common.fpy.bytecode.directives import serialize_directives


def main():
    """Compile a .fpy sequence file into a binary bytecode file.

    Parses the input file named on the command line, compiles it against the
    supplied FPrime JSON dictionary, and serializes the resulting directives
    to the output path (default: the input path with a .bin suffix).
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("input", type=Path, help="The input .fpy file")
    arg_parser.add_argument(
        "-o",
        "--output",
        type=Path,
        required=False,
        default=None,
        help="The output .bin path",
    )
    arg_parser.add_argument(
        "-d",
        "--dictionary",
        type=Path,
        required=True,
        help="The FPrime dictionary .json file",
    )

    args = arg_parser.parse_args()

    if not args.input.exists():
        # Report errors on stderr and exit via sys.exit (the site builtin
        # exit() is intended for interactive use only)
        print(f"Input file {args.input} does not exist", file=sys.stderr)
        sys.exit(-1)

    body = parse(args.input.read_text())
    directives = compile(body, args.dictionary)
    # Default the output path to the input file with a .bin extension
    output = args.output
    if output is None:
        output = args.input.with_suffix(".bin")
    serialize_directives(directives, output)
    print("Done")
@@ -0,0 +1,239 @@
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass, field
3
+ from pathlib import Path
4
+ from typing import Literal as TypingLiteral, Union
5
+ from lark.indenter import PythonIndenter
6
+ from lark import Lark, Transformer, v_args
7
+ from lark.tree import Meta
8
+
9
+ fpy_grammar_str = (Path(__file__).parent / "grammar.lark").read_text()
10
+
11
+ input_text = None
12
+
13
+
14
+ def parse(text: str):
15
+ parser = Lark(
16
+ fpy_grammar_str,
17
+ start="input",
18
+ parser="lalr",
19
+ postlex=PythonIndenter(),
20
+ propagate_positions=True,
21
+ maybe_placeholders=True,
22
+ )
23
+
24
+ global input_text
25
+ input_text = text
26
+ tree = parser.parse(text, on_error=lambda x: print("Error"))
27
+ transformed = FpyTransformer().transform(tree)
28
+ return transformed
29
+
30
+
31
@dataclass
class Ast:
    """Base class for all fpy AST nodes.

    Carries the Lark parse metadata plus a cleaned-up snippet of the source
    text the node was parsed from (used by __repr__).
    """

    # Lark parse metadata (position info); excluded from repr
    meta: Meta = field(repr=False)
    # Assigned after construction by later stages; None until then
    id: int | None = field(init=False, repr=False, default=None)
    # Source text backing this node, newlines collapsed; "" when the meta
    # carries no position information
    node_text: str | None = field(init=False, repr=False, default=None)

    def __post_init__(self):
        # Placeholder/empty metas (e.g. from maybe_placeholders) have no
        # start_pos, so there is no source span to slice
        if not hasattr(self.meta, "start_pos"):
            self.node_text = ""
            return
        self.node_text = (
            input_text[self.meta.start_pos : self.meta.end_pos]
            .replace("\n", " ")
            .strip()
        )

    def __hash__(self):
        # Hash on the assigned id rather than on field values
        return hash(self.id)

    def __repr__(self):
        return f"{self.__class__.__name__}({self.node_text})"
52
+
53
+
54
@dataclass
class AstVar(Ast):
    """A bare variable reference (a single name)."""

    var: str


@dataclass()
class AstString(Ast):
    """A string literal (surrounding quotes already stripped)."""

    value: str


@dataclass
class AstNumber(Ast):
    """An integer or floating point literal."""

    value: int | float


@dataclass
class AstBoolean(Ast):
    """A True/False literal."""

    value: TypingLiteral[True] | TypingLiteral[False]


# Union of all literal node types
AstLiteral = AstString | AstNumber | AstBoolean


@dataclass
class AstGetAttr(Ast):
    """Attribute access: parent.attr"""

    parent: "AstReference"
    attr: str


@dataclass
class AstGetItem(Ast):
    """Subscript access: parent[item]"""

    parent: "AstReference"
    item: AstNumber


@dataclass
class AstFuncCall(Ast):
    """A function call: func(args...); args is None when no arguments were given."""

    func: "AstReference"
    args: list["AstExpr"] | None


@dataclass
class AstInfixOp(Ast):
    """A comparison operator token (e.g. "==", "<=")."""

    value: str


@dataclass()
class AstPass(Ast):
    """The `pass` statement (no payload)."""

    pass


@dataclass
class AstComparison(Ast):
    """A binary comparison: lhs op rhs"""

    lhs: "AstExpr"
    op: AstInfixOp
    rhs: "AstExpr"


@dataclass
class AstNot(Ast):
    """Logical negation: not value"""

    value: "AstExpr"


@dataclass
class AstAnd(Ast):
    """Logical conjunction over a list of operands."""

    values: list["AstExpr"]


@dataclass
class AstOr(Ast):
    """Logical disjunction over a list of operands."""

    values: list["AstExpr"]


# Union of all boolean-test node types
AstTest = AstOr | AstAnd | AstNot | AstComparison


# Anything that names a value: attribute access, subscript, or a bare variable
AstReference = AstGetAttr | AstGetItem | AstVar
AstExpr = Union[AstFuncCall, AstTest, AstLiteral, AstReference]


@dataclass
class AstAssign(Ast):
    """An assignment: variable [: var_type] = value (var_type may be None)."""

    variable: AstVar
    var_type: AstReference | None
    value: AstExpr


@dataclass
class AstElif(Ast):
    """A single elif clause: condition + body."""

    condition: AstExpr
    body: "AstBody"


@dataclass
class AstElifs(Ast):
    """The (possibly empty) sequence of elif clauses of an if statement."""

    cases: list[AstElif]


@dataclass()
class AstIf(Ast):
    """An if statement: condition, body, elif clauses, and optional else body."""

    condition: AstExpr
    body: "AstBody"
    elifs: AstElifs | None
    els: Union["AstBody", None]


# Union of all statement node types
AstStmt = Union[AstExpr, AstAssign, AstPass, AstIf]


@dataclass
class AstBody(Ast):
    """A sequence of statements (a block body or the whole program)."""

    stmts: list[AstStmt]


# @dataclass with eq=True sets __hash__ = None on every subclass; restore the
# id-based hash from the Ast base so nodes remain usable as dict/set keys.
for cls in Ast.__subclasses__():
    cls.__hash__ = Ast.__hash__
    # cls.__repr__ = Ast.__repr__
171
+
172
+
173
# NOTE(review): `as_list` appears unused -- it is defined at module scope
# (taking `self` as if it were a Transformer method) and is not referenced
# elsewhere in this module; confirm before removing.
@v_args(meta=False, inline=False)
def as_list(self, tree):
    return list(tree)


def no_inline_or_meta(type):
    """Build a Transformer callback that calls `type(children)`, dropping the
    meta object and keeping the children as a single list (no inlining)."""

    @v_args(meta=False, inline=False)
    def wrapper(self, tree):
        return type(tree)

    return wrapper


def no_inline(type):
    """Build a Transformer callback that calls `type(meta, children)`, keeping
    the children as a single list (no inlining)."""

    @v_args(meta=True, inline=False)
    def wrapper(self, meta, tree):
        return type(meta, tree)

    return wrapper


def no_meta(type):
    """Build a Transformer callback that inlines the children and drops meta."""

    @v_args(meta=False, inline=True)
    def wrapper(self, tree):
        return type(tree)

    return wrapper

# Used as a terminal callback on FpyTransformer, so the first parameter is
# actually the Transformer instance; strips leading/trailing quote characters.
def handle_str(meta, s: str):
    return s.strip("'").strip('"')
203
+
204
+
205
@v_args(meta=True, inline=True)
class FpyTransformer(Transformer):
    """Maps each grammar rule/terminal to the Ast constructor that builds it.

    With meta=True, inline=True each rule callback is invoked as
    (meta, *children), which matches the (meta, fields...) signature of the
    Ast dataclasses, so most rules name the dataclass directly. The
    no_inline / no_inline_or_meta / no_meta wrappers adjust the calling
    convention for rules whose children must stay a single list or whose
    constructor takes no meta.
    """

    input = no_inline(AstBody)
    pass_stmt = AstPass

    assign = AstAssign

    if_stmt = AstIf
    elifs = no_inline(AstElifs)
    elif_ = AstElif
    body = no_inline(AstBody)
    or_test = no_inline(AstOr)
    and_test = no_inline(AstAnd)
    not_test = AstNot
    comparison = AstComparison
    comp_op = AstInfixOp

    func_call = AstFuncCall
    arguments = no_inline_or_meta(list)

    string = AstString
    number = AstNumber
    boolean = AstBoolean
    name = no_meta(str)
    get_attr = AstGetAttr
    get_item = AstGetItem
    var = AstVar

    # Terminal callbacks convert raw tokens to plain Python values. The
    # lambdas are bound as methods, hence the two parameters (self, token).
    NAME = str
    DEC_NUMBER = int
    FLOAT_NUMBER = float
    COMPARISON_OP = str
    STRING = handle_str
    CONST_TRUE = lambda a, b: True
    CONST_FALSE = lambda a, b: False
@@ -130,7 +130,7 @@ class BaseCommand(abc.ABC):
130
130
  passes the given filter
131
131
  """
132
132
  project_dictionary = Dictionaries()
133
- project_dictionary.load_dictionaries(dictionary_path, packet_spec=None)
133
+ project_dictionary.load_dictionaries(dictionary_path, packet_spec=None, packet_set_name=None)
134
134
  items = cls._get_item_list(project_dictionary, search_filter)
135
135
  return cls._get_item_list_string(items, json)
136
136
 
@@ -6,7 +6,10 @@ defines the "DataHandler" base class for handling data.
6
6
 
7
7
  @author mstarch
8
8
  """
9
+
9
10
  import abc
11
+ from typing import List, Type
12
+ from fprime_gds.plugin.definitions import gds_plugin_specification
10
13
 
11
14
 
12
15
  class DataHandler(abc.ABC):
@@ -27,6 +30,42 @@ class DataHandler(abc.ABC):
27
30
  """
28
31
 
29
32
 
33
class DataHandlerPlugin(DataHandler, abc.ABC):
    """Plugin class allowing for custom data handlers

    This class acts as a DataHandler class with the addition that it can be used as a plugin and thus self reports the
    data types it handles (whereas DataHandler leaves that up to the registration call). Users shall concretely subclass
    this class with their own data handling functionality.
    """

    @abc.abstractmethod
    # NOTE(review): declared without `self`; as written it can only be called on
    # the class object, not on an instance -- confirm the intended calling
    # convention with the plugin system before changing.
    def get_handled_descriptors() -> List[str]:
        """Return a list of data descriptor names this plugin handles"""
        raise NotImplementedError()

    @classmethod
    @gds_plugin_specification
    def register_data_handler_plugin(cls) -> Type["DataHandlerPlugin"]:
        """Register a plugin to provide post-decoding data handling capabilities

        Plugin hook for registering a plugin that supplies a DataHandler implementation. Implementors of this hook must
        return a non-abstract subclass of DataHandlerPlugin. This class will be provided as a data handler
        that is automatically enabled. Users may disable this via the command line. This data handler will be supplied
        all data types returned by the `get_handled_descriptors()` method.

        This DataHandler will run within the standard GDS (UI) process. Users wanting a separate process shall use a
        GdsApp plugin instead.

        Note: users should return the class, not an instance of the class. Needed arguments for instantiation are
        determined from class methods, solicited via the command line, and provided at construction time to the chosen
        instantiation.

        Returns:
            DataHandlerPlugin subclass (not instance)
        """
        raise NotImplementedError()
67
+
68
+
30
69
  class HandlerRegistrar(abc.ABC):
31
70
  """
32
71
  Defines a class that will take in registrants and remember them for calling back later. These objects should be of
@@ -0,0 +1,54 @@
1
+ """
2
+ fw_type_json_loader.py:
3
+
4
+ Loads flight dictionary (JSON) and returns name based Python dictionaries of Fw types
5
+
6
+ @author jawest
7
+ """
8
+
9
+ from fprime_gds.common.loaders.json_loader import JsonLoader
10
+ from fprime_gds.common.data_types.exceptions import GdsDictionaryParsingException
11
+
12
class FwTypeJsonLoader(JsonLoader):
    """Class to load python based Fw type dictionaries"""

    # JSON field holding every type definition in the ground dictionary
    TYPE_DEFINITIONS_FIELD = "typeDefinitions"

    def construct_dicts(self, _):
        """
        Constructs and returns python dictionaries keyed on id and name

        Args:
            _: Unused argument (inherited)
        Returns:
            A tuple with two Fw type dictionaries (python type dict):
            (id_dict, name_dict). The keys should be the type id and
            name fields respectively and the values should be type name
            strings. Note: An empty id dictionary is returned since there
            are no id fields in the Fw type alias JSON dictionary entries.
        """
        id_dict = {}
        name_dict = {}

        if self.TYPE_DEFINITIONS_FIELD not in self.json_dict:
            raise GdsDictionaryParsingException(
                f"Ground Dictionary missing '{self.TYPE_DEFINITIONS_FIELD}' field: {str(self.json_file)}"
            )

        for type_def in self.json_dict[self.TYPE_DEFINITIONS_FIELD]:
            try:
                if type_def["kind"] == "alias":
                    name = str(type_def["qualifiedName"])
                    # Only consider names with the pattern Fw*Type
                    if name.startswith("Fw") and name.endswith("Type"):
                        name_dict[name] = type_def["underlyingType"]["name"]
            except KeyError as e:
                # Chain the original KeyError so the missing key is debuggable
                raise GdsDictionaryParsingException(
                    f"{str(e)} key missing from Type Definition dictionary entry: {str(type_def)}"
                ) from e

        return (
            dict(sorted(id_dict.items())),
            dict(sorted(name_dict.items())),
            self.get_versions(),
        )
@@ -0,0 +1,125 @@
1
+ """
2
+ pkt_json_loader.py:
3
+
4
+ Loads flight dictionary (JSON) and returns Python dictionaries of telemetry packets
5
+
6
+ @author jawest
7
+ """
8
+
9
+ from fprime_gds.common.templates.pkt_template import PktTemplate
10
+ from fprime_gds.common.loaders.json_loader import JsonLoader
11
+ from fprime_gds.common.data_types.exceptions import GdsDictionaryParsingException
12
+
13
+
14
class PktJsonLoader(JsonLoader):
    """Class to load python based telemetry packet dictionaries"""

    # JSON field holding the telemetry packet sets
    PACKETS_FIELD = "telemetryPacketSets"

    # Keys within each packet set entry
    SET_NAME = "name"
    MEMBERS = "members"

    def get_packet_set_names(self, path):
        """ Get the list of packet sets """
        return [packet_set[self.SET_NAME] for packet_set in self.json_dict[self.PACKETS_FIELD]]

    def _get_cached_dicts(self, path, packet_set_name: str, ch_name_dict: dict):
        """Return (id_dict, name_dict) for the packet set, constructing and caching on first use.

        Shared implementation of get_id_dict/get_name_dict; the cache is keyed
        first by path, then by packet set name.
        """
        if path in self.saved_dicts and packet_set_name in self.saved_dicts[path]:
            (id_dict, name_dict) = self.saved_dicts[path][packet_set_name]
        else:
            (id_dict, name_dict, self.versions) = self.construct_dicts(packet_set_name, ch_name_dict)
            if path not in self.saved_dicts:
                self.saved_dicts[path] = dict()
            self.saved_dicts[path].update({packet_set_name: (id_dict, name_dict)})
        return id_dict, name_dict

    def get_id_dict(self, path, packet_set_name: str, ch_name_dict: dict):
        """Return the id-keyed packet dictionary for the given packet set."""
        return self._get_cached_dicts(path, packet_set_name, ch_name_dict)[0]

    def get_name_dict(self, path, packet_set_name: str, ch_name_dict: dict):
        """Return the name-keyed packet dictionary for the given packet set."""
        return self._get_cached_dicts(path, packet_set_name, ch_name_dict)[1]

    def construct_dicts(self, packet_set_name: str, ch_name_dict: dict):
        """
        Constructs and returns python dictionaries keyed on id and name

        This function should not be called directly, instead, use
        get_id_dict(path) and get_name_dict(path)

        Args:
            packet_set_name: name of the packet set to load
            ch_name_dict (dict()): Channel dictionary with names as keys and
                ChTemplate objects as values.

        Returns:
            A tuple with two packet dictionaries (type==dict()):
            (id_dict, name_dict) and the dictionary version. The keys of the packet dictionaries should
            be the packets' id and name fields respectively and the values should be PktTemplate objects.

        Raises:
            GdsDictionaryParsingException: if the packets field or the named
                packet set is missing, or a packet entry is malformed.
        """
        id_dict = {}
        name_dict = {}

        if self.PACKETS_FIELD not in self.json_dict:
            raise GdsDictionaryParsingException(
                f"Ground Dictionary missing '{self.PACKETS_FIELD}' field: {str(self.json_file)}"
            )

        for packet_dict in self.json_dict[self.PACKETS_FIELD]:
            try:
                if packet_set_name == packet_dict[self.SET_NAME]:
                    for packet_group_dict in packet_dict.get(self.MEMBERS, []):
                        packet_temp = self.construct_template_from_dict(packet_group_dict, ch_name_dict)
                        id_dict[packet_temp.get_id()] = packet_temp
                        name_dict[packet_temp.get_name()] = packet_temp

                    # Found the requested set; no need to scan the rest
                    return (
                        dict(sorted(id_dict.items())),
                        dict(sorted(name_dict.items())),
                        self.get_versions(),
                    )

            except KeyError as e:
                raise GdsDictionaryParsingException(
                    f"{str(e)} key missing from telemetry packet dictionary entry: {str(packet_dict)}"
                ) from e

        raise GdsDictionaryParsingException(
            f"Ground Dictionary does not contain packet set '{packet_set_name}'"
        )

    def construct_template_from_dict(self, packet_group_dict: dict, ch_name_dict: dict):
        """
        Args:
            packet_group_dict (dict()): Packet group dictionary with group id, name, and members
            ch_name_dict (dict()): Channel dictionary with names as keys and ChTemplate objects as values.
        Returns:
            A PktTemplate object containing the packet group id, group name, and list of ChTemplate
            objects that represent each member in the packet.
        Raises:
            GdsDictionaryParsingException: if a required key is missing or a
                member is not a known channel.
        """
        ch_list = []
        # Pre-bind so the except clause below can always format a message,
        # even when the "name" key itself is the one that is missing
        group_name = "<unknown>"
        try:
            group_name = packet_group_dict["name"]
            group_id = packet_group_dict["id"]
            group_members = packet_group_dict["members"]

            for ch_name in group_members:
                ch_template = ch_name_dict[ch_name]
                ch_list.append(ch_template)

        except KeyError as e:
            raise GdsDictionaryParsingException(
                f"{str(e)} key missing from telemetry packet member or member is not a channel in the dictionary: {str(group_name)}"
            ) from e

        return PktTemplate(
            group_id,
            group_name,
            ch_list
        )
+
@@ -0,0 +1,85 @@
1
+ """
2
+ prm_json_loader.py:
3
+
4
+ Loads flight dictionary (JSON) and returns id and mnemonic based Python dictionaries of params
5
+
6
+ @author zimri.leisher
7
+ """
8
+
9
+ from fprime_gds.common.templates.prm_template import PrmTemplate
10
+ from fprime_gds.common.loaders.json_loader import JsonLoader
11
+ from fprime_gds.common.data_types.exceptions import GdsDictionaryParsingException
12
+
13
+
14
class PrmJsonLoader(JsonLoader):
    """Class to load parameters from json dictionaries"""

    # JSON field holding all parameter entries
    PARAMS_FIELD = "parameters"

    # Keys used within each parameter entry
    ID = "id"
    NAME = "name"
    TYPE = "type"
    DESC = "annotation"
    DEFAULT = "default"


    def construct_dicts(self, _):
        """
        Constructs and returns python dictionaries keyed on id and name

        Args:
            _: Unused argument (inherited)
        Returns:
            A tuple with two parameter dictionaries (python type dict):
            (id_dict, fqn_name_dict). The keys should be the parameters' id and
            fully qualified name fields respectively and the values should be PrmTemplate
            objects.
        """
        id_dict = {}
        fqn_name_dict = {}

        if self.PARAMS_FIELD not in self.json_dict:
            raise GdsDictionaryParsingException(
                f"Ground Dictionary missing '{self.PARAMS_FIELD}' field: {str(self.json_file)}"
            )

        for prm_dict in self.json_dict[self.PARAMS_FIELD]:
            # Create a parameter template object
            prm_temp = self.construct_template_from_dict(prm_dict)

            id_dict[prm_temp.get_id()] = prm_temp
            fqn_name_dict[prm_temp.get_full_name()] = prm_temp

        return (
            dict(sorted(id_dict.items())),
            dict(sorted(fqn_name_dict.items())),
            self.get_versions(),
        )

    def construct_template_from_dict(self, prm_dict: dict) -> PrmTemplate:
        """Build a PrmTemplate from a single JSON parameter entry.

        Args:
            prm_dict: one entry from the dictionary's "parameters" list
        Returns:
            PrmTemplate for the entry
        Raises:
            GdsDictionaryParsingException: if a key is missing or the name is
                not of the form '<QUAL_COMP_NAME>.<PRM_NAME>'
        """
        try:
            prm_id = prm_dict[self.ID]
            # The below assignment also raises a ValueError if the name does not contain a '.'
            qualified_component_name, prm_name = prm_dict[self.NAME].rsplit('.', 1)
            if not qualified_component_name or not prm_name:
                raise ValueError()

            type_obj = self.parse_type(prm_dict[self.TYPE])
        except ValueError as e:
            # Chain the original error for easier debugging
            raise GdsDictionaryParsingException(
                f"Parameter dictionary entry malformed, expected name of the form '<QUAL_COMP_NAME>.<PRM_NAME>' in : {str(prm_dict)}"
            ) from e
        except KeyError as e:
            raise GdsDictionaryParsingException(
                f"{str(e)} key missing from parameter dictionary entry or its associated type in the dictionary: {str(prm_dict)}"
            ) from e

        prm_default_val = prm_dict.get(self.DEFAULT, None)

        return PrmTemplate(
            prm_id,
            prm_name,
            qualified_component_name,
            type_obj,
            prm_default_val
        )