fprime-gds 3.6.2a1__py3-none-any.whl → 4.0.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. fprime_gds/common/communication/adapters/uart.py +34 -25
  2. fprime_gds/common/decoders/ch_decoder.py +1 -1
  3. fprime_gds/common/decoders/event_decoder.py +2 -1
  4. fprime_gds/common/decoders/pkt_decoder.py +1 -1
  5. fprime_gds/common/distributor/distributor.py +2 -2
  6. fprime_gds/common/encoders/ch_encoder.py +2 -2
  7. fprime_gds/common/encoders/cmd_encoder.py +2 -2
  8. fprime_gds/common/encoders/event_encoder.py +2 -2
  9. fprime_gds/common/encoders/pkt_encoder.py +2 -2
  10. fprime_gds/common/encoders/seq_writer.py +2 -2
  11. fprime_gds/common/fpy/__init__.py +0 -0
  12. fprime_gds/common/fpy/serialize_bytecode.py +229 -0
  13. fprime_gds/common/fpy/types.py +203 -0
  14. fprime_gds/common/gds_cli/base_commands.py +1 -1
  15. fprime_gds/common/handlers.py +39 -0
  16. fprime_gds/common/loaders/fw_type_json_loader.py +54 -0
  17. fprime_gds/common/loaders/pkt_json_loader.py +121 -0
  18. fprime_gds/common/loaders/prm_json_loader.py +85 -0
  19. fprime_gds/common/pipeline/dictionaries.py +21 -4
  20. fprime_gds/common/pipeline/encoding.py +19 -0
  21. fprime_gds/common/pipeline/histories.py +4 -0
  22. fprime_gds/common/pipeline/standard.py +16 -2
  23. fprime_gds/common/templates/prm_template.py +81 -0
  24. fprime_gds/common/testing_fw/api.py +42 -0
  25. fprime_gds/common/testing_fw/pytest_integration.py +25 -2
  26. fprime_gds/common/tools/README.md +34 -0
  27. fprime_gds/common/tools/params.py +246 -0
  28. fprime_gds/common/utils/config_manager.py +6 -6
  29. fprime_gds/executables/apps.py +184 -11
  30. fprime_gds/executables/cli.py +443 -125
  31. fprime_gds/executables/comm.py +5 -2
  32. fprime_gds/executables/fprime_cli.py +3 -3
  33. fprime_gds/executables/run_deployment.py +12 -4
  34. fprime_gds/flask/static/js/vue-support/channel.js +1 -1
  35. fprime_gds/flask/static/js/vue-support/event.js +1 -1
  36. fprime_gds/plugin/definitions.py +86 -8
  37. fprime_gds/plugin/system.py +171 -58
  38. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/METADATA +18 -19
  39. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/RECORD +44 -35
  40. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/WHEEL +1 -1
  41. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/entry_points.txt +2 -0
  42. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info/licenses}/LICENSE.txt +0 -0
  43. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info/licenses}/NOTICE.txt +0 -0
  44. {fprime_gds-3.6.2a1.dist-info → fprime_gds-4.0.0a2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,34 @@
1
+ # FPrime GDS tools
2
+ ## fprime-prm-write
3
+ ### JSON file reference
4
+ JSON files for the `fprime-prm-write` tool should take the following form:
5
+ ```json
6
+ {
7
+ "componentInstanceOne": {
8
+ "parameterNameOne": "parameter value",
9
+ "parameterNameTwo": ["a", "b", "c"],
10
+ "parameterNameThree": {
11
+ "complexValue": [123, 456]
12
+ }
13
+ },
14
+ "componentInstanceTwo": {
15
+ "parameterNameFour": true
16
+ }
17
+ }
18
+ ```
19
+ The JSON should consist of a key-value map of component instance names to an inner key-value map. The inner key-value map should consist of parameter name-to-value map entries. The parameter values support complex FPrime types, such as nested structs, arrays or enum constants. Structs are instantiated with key-value maps, where the keys are the field names and the values are the field values. Arrays are just JSON arrays, and enum constants are represented as strings.
20
+
21
+ ### How to Initialize a ParamDB .dat File
22
+
23
+ The `fprime-prm-write` tool can be used to create a `.dat` file compatible with the `PrmDb` component from a JSON file. To use it, create a compatible JSON file as defined in the JSON File Reference above, and pass it in to the tool using the `dat` subcommand, like so:
24
+ ```
25
+ fprime-prm-write dat <json file> --dictionary <path to compiled FPrime dict>
26
+ ```
27
+ You should then have a `.dat` file which can be passed in to the `PrmDb`. Note, this `.dat` file will only have entries for the parameters specified in the JSON file. If you want it to instead have a value for all parameters which have a default value, you can add the `--defaults` option. Then, the generated `.dat` file will essentially reset all parameters back to default, except for those specified in the JSON file.
28
+
29
+ ### How to Create a .seq File From a Parameter JSON File
30
+ Sometimes, you may want to update parameters while the FPrime application is running. This can be accomplished with a sequence of `_PRM_SET` commands, which the `fprime-prm-write` tool can automatically create for you. To use, create a compatible JSON file as defined in the JSON File Reference above, and pass it in to the tool using the `seq` subcommand, like so:
31
+ ```
32
+ fprime-prm-write seq <json file> --dictionary <path to compiled FPrime dict>
33
+ ```
34
+ You should then have a `.seq` file which can be compiled and executed by the `CmdSequencer`.
@@ -0,0 +1,246 @@
1
+ # author: zimri.leisher
2
+ # created on: Jan 27, 2025
3
+
4
+ # allow us to use bracketed types
5
+ from __future__ import annotations
6
+ import json as js
7
+ from pathlib import Path
8
+ from argparse import ArgumentParser
9
+ from typing import Any
10
+ from fprime_gds.common.loaders.prm_json_loader import PrmJsonLoader
11
+ from fprime_gds.common.templates.prm_template import PrmTemplate
12
+ from fprime.common.models.serialize.type_base import BaseType
13
+ from fprime.common.models.serialize.array_type import ArrayType
14
+ from fprime.common.models.serialize.bool_type import BoolType
15
+ from fprime.common.models.serialize.enum_type import EnumType
16
+ from fprime.common.models.serialize.numerical_types import (
17
+ F32Type,
18
+ F64Type,
19
+ I8Type,
20
+ I16Type,
21
+ I32Type,
22
+ I64Type,
23
+ U8Type,
24
+ U16Type,
25
+ U32Type,
26
+ U64Type,
27
+ )
28
+ from fprime.common.models.serialize.serializable_type import SerializableType
29
+ from fprime.common.models.serialize.string_type import StringType
30
+
31
+ FW_PRM_ID_TYPE_SIZE = 4 # serialized size of the FwPrmIdType
32
+
33
+
34
+ def instantiate_prm_type(prm_val_json, prm_type: type[BaseType]):
35
+ """given a parameter type and its value in json form, instantiate the type
36
+ with the value, or raise an exception if the json is not compatible"""
37
+ prm_instance = prm_type()
38
+ if isinstance(prm_instance, BoolType):
39
+ value = str(prm_val_json).lower().strip()
40
+ if value in {"true", "yes"}:
41
+ av = True
42
+ elif value in {"false", "no"}:
43
+ av = False
44
+ else:
45
+ raise RuntimeError("Param value is not a valid boolean")
46
+ prm_instance.val = av
47
+ elif isinstance(prm_instance, EnumType):
48
+ prm_instance.val = prm_val_json
49
+ elif isinstance(prm_instance, (F64Type, F32Type)):
50
+ prm_instance.val = float(prm_val_json)
51
+ elif isinstance(
52
+ prm_instance,
53
+ (I64Type, U64Type, I32Type, U32Type, I16Type, U16Type, I8Type, U8Type),
54
+ ):
55
+ prm_instance.val = int(prm_val_json, 0) if isinstance(prm_val_json, str) else int(prm_val_json)
56
+ elif isinstance(prm_instance, StringType):
57
+ prm_instance.val = prm_val_json
58
+ elif isinstance(prm_instance, (ArrayType, SerializableType)):
59
+ prm_instance.val = prm_val_json
60
+ else:
61
+ raise RuntimeError(
62
+ "Param value could not be converted to type object"
63
+ )
64
+ return prm_instance
65
+
66
+
67
def parsed_json_to_dat(templates_and_values: list[tuple[PrmTemplate, Any]]) -> bytes:
    """Convert a list of (PrmTemplate, prm value json) to serialized bytes for a PrmDb.

    See https://github.com/nasa/fprime/blob/devel/Svc/PrmDb/docs/sdd.md#32-functional-description
    for an explanation of the binary format of parameters in the .dat file:
    each record is a delimiter byte, a big-endian record size, the big-endian
    parameter id, then the serialized parameter value.

    Args:
        templates_and_values: pairs of parameter template and JSON-decoded value

    Returns:
        the concatenated serialized records
    """
    # Accumulate chunks and join once at the end: repeated `bytes +=` in a
    # loop is quadratic in the total output size.
    chunks = []
    for template, json_value in templates_and_values:
        prm_instance = instantiate_prm_type(json_value, template.prm_type_obj)
        prm_instance_bytes = prm_instance.serialize()

        # record delimiter
        chunks.append(b"\xA5")
        # size of following data (id + value)
        record_size = FW_PRM_ID_TYPE_SIZE + len(prm_instance_bytes)
        chunks.append(record_size.to_bytes(length=4, byteorder="big"))
        # id of param
        chunks.append(template.prm_id.to_bytes(length=4, byteorder="big"))
        # value of param
        chunks.append(prm_instance_bytes)
    return b"".join(chunks)
91
+
92
+
93
def parsed_json_to_seq(templates_and_values: list[tuple[PrmTemplate, dict]], include_save=False) -> list[str]:
    """Convert a list of (PrmTemplate, prm value json) to a command sequence for the CmdSequencer.

    Args:
        templates_and_values: pairs of parameter template and JSON-decoded value
        include_save: also emit a `_PRM_SAVE` command after each `_PRM_SET`

    Returns:
        a list of lines in the sequence.
    """
    cmds = ["; Autocoded sequence file from JSON"]
    for template, json_value in templates_and_values:
        qualified = template.comp_name + "." + template.prm_name.upper()
        cmds.append("R00:00:00 " + qualified + "_PRM_SET " + str(json_value))
        if include_save:
            # Fix: the save command must carry a time tag like every other
            # sequence line; a bare command name is not a valid .seq entry.
            cmds.append("R00:00:00 " + qualified + "_PRM_SAVE")
    return cmds
107
+
108
+
109
+
110
def parse_json(param_value_json, name_dict: dict[str, PrmTemplate], include_implicit_defaults=False) -> list[tuple[PrmTemplate, dict]]:
    """
    param_value_json: the json object read from the .json file
    name_dict: a dictionary of (fqn param name, PrmTemplate) pairs
    include_implicit_defaults: whether or not to also include default values from the name dict
        if no value was specified in the json
    @return a list of tuples of param template and the intended param value (in form of json dict)

    Raises RuntimeError if the JSON names a parameter not present in name_dict.
    """
    # first, check the json for errors: every component.param must be in the dictionary
    for component_name in param_value_json:
        for param_name in param_value_json[component_name]:
            fqn_param_name = component_name + "." + param_name
            if name_dict.get(fqn_param_name) is None:
                raise RuntimeError(
                    "Unable to find param "
                    + fqn_param_name
                    + " in dictionary"
                )

    # Sentinel so explicitly-specified falsy values (0, False, "", []) are kept.
    # The previous `if not prm_val` check silently dropped them.
    unset = object()

    # okay, now iterate over the dict
    templates_to_values = []
    for fqn_param_name, prm_template in name_dict.items():
        prm_val = unset

        if include_implicit_defaults:
            # start from the default value (may be None when there is none)
            prm_val = prm_template.prm_default_val

        comp_json = param_value_json.get(prm_template.comp_name)
        if comp_json and prm_template.prm_name in comp_json:
            # an explicit entry for this param overrides any default
            prm_val = comp_json[prm_template.prm_name]

        if prm_val is unset or prm_val is None:
            # no value (and no usable default) for this param -- nothing to write
            continue

        templates_to_values.append((prm_template, prm_val))

    return templates_to_values
155
+
156
+
157
def main():
    """Entry point for the fprime-prm-write tool.

    Builds the `dat`/`seq` subcommand CLI, validates the input paths, and
    dispatches to convert_json. Exits with status 1 on bad inputs.
    """
    arg_parser = ArgumentParser()
    subparsers = arg_parser.add_subparsers(dest="subcmd", required=True)

    json_to_dat = subparsers.add_parser("dat", help="Compiles .json files into param DB .dat files")
    json_to_dat.add_argument(
        "json_file", type=Path, help="The .json file to turn into a .dat file", default=None
    )
    json_to_dat.add_argument(
        "--dictionary",
        "-d",
        type=Path,
        help="The dictionary file of the FSW",
        required=True,
    )
    json_to_dat.add_argument("--defaults", action="store_true", help="Whether or not to implicitly include default parameter values in the output")
    json_to_dat.add_argument("--output", "-o", type=Path, help="The output file", default=None)

    json_to_seq = subparsers.add_parser("seq", help="Converts .json files into command sequence .seq files")
    json_to_seq.add_argument(
        "json_file", type=Path, help="The .json file to turn into a .seq file", default=None
    )
    json_to_seq.add_argument(
        "--dictionary",
        "-d",
        type=Path,
        help="The dictionary file of the FSW",
        required=True,
    )
    json_to_seq.add_argument("--defaults", action="store_true", help="Whether or not to implicitly include default parameter values in the output")
    json_to_seq.add_argument("--save", action="store_true", help="Whether or not to include the PRM_SAVE cmd in the output")
    json_to_seq.add_argument("--output", "-o", type=Path, help="The output file", default=None)

    args = arg_parser.parse_args()

    if args.json_file is None or not args.json_file.exists():
        print("Unable to find", args.json_file)
        exit(1)

    if args.json_file.is_dir():
        print("json-file is a dir", args.json_file)
        exit(1)

    if not args.dictionary.exists():
        print("Unable to find", args.dictionary)
        exit(1)

    output_format = args.subcmd

    # default: compile the one file in place, swapping suffix for the format
    if args.output is None:
        output_path = args.json_file.with_suffix("." + output_format)
    else:
        output_path = args.output

    # BUG FIX: only the `seq` subparser defines --save, so reading args.save
    # unconditionally raised AttributeError for the `dat` subcommand.
    convert_json(
        args.json_file,
        args.dictionary,
        output_path,
        output_format,
        args.defaults,
        getattr(args, "save", False),
    )
216
+
217
+
218
def convert_json(json_file: Path, dictionary: Path, output: Path, output_format: str, implicit_defaults=False, include_save_cmd=False):
    """Convert a parameter JSON file into a .dat or .seq file.

    Args:
        json_file: path to the input parameter JSON file
        dictionary: path to the compiled FPrime dictionary
        output: path the result is written to (parents created as needed)
        output_format: either "dat" or "seq"
        implicit_defaults: also include dictionary default values
        include_save_cmd: ("seq" only) append a PRM_SAVE command per parameter

    Raises:
        RuntimeError: when output_format is neither "dat" nor "seq"
    """
    print(f"Converting {json_file} to {output} (format: .{output_format})")
    output.parent.mkdir(parents=True, exist_ok=True)

    parsed = js.loads(json_file.read_text())

    # load (id, name) parameter dictionaries from the compiled FSW dictionary
    loader = PrmJsonLoader(str(dictionary.resolve()))
    _, name_dict, _ = loader.construct_dicts(str(dictionary.resolve()))

    pairs = parse_json(parsed, name_dict, implicit_defaults)

    if output_format == "dat":
        payload = parsed_json_to_dat(pairs)
        print("Done, writing to", output.resolve())
        output.write_bytes(payload)
    elif output_format == "seq":
        lines = parsed_json_to_seq(pairs, include_save_cmd)
        print("Done, writing to", output.resolve())
        output.write_text("\n".join(lines))
    else:
        raise RuntimeError("Invalid output format " + str(output_format))
243
+
244
+
245
+ if __name__ == "__main__":
246
+ main()
@@ -114,7 +114,7 @@ class ConfigManager(configparser.ConfigParser):
114
114
  return U16Type()
115
115
  if type_str == "U32":
116
116
  return U32Type()
117
- if type_str == "u64":
117
+ if type_str == "U64":
118
118
  return U64Type()
119
119
  if type_str == "I8":
120
120
  return I8Type()
@@ -153,11 +153,11 @@ class ConfigManager(configparser.ConfigParser):
153
153
 
154
154
  self.__prop["types"] = {
155
155
  "msg_len": "U32",
156
- "msg_desc": "U32",
157
- "ch_id": "U32",
158
- "event_id": "U32",
159
- "op_code": "U32",
160
- "pkt_id": "U16",
156
+ "FwPacketDescriptorType": "U32",
157
+ "FwChanIdType": "U32",
158
+ "FwEventIdType": "U32",
159
+ "FwOpcodeType": "U32",
160
+ "FwTlmPacketizeIdType": "U16",
161
161
  "key_val": "U16",
162
162
  }
163
163
  self._set_section_defaults("types")
@@ -1,4 +1,4 @@
1
- """ fprime_gds.executables.apps: an implementation of start-up apps in fprime
1
+ """fprime_gds.executables.apps: an implementation of start-up apps in fprime
2
2
 
3
3
  There are two ways to approach start-up applications in fprime. First is to implement a run method via a subclass of
4
4
  `GdsFunction`. This gives the implementor the ability to run anything within the run function that python offers,
@@ -10,15 +10,27 @@ command line that will be spun into its own process.
10
10
 
11
11
  @author lestarch
12
12
  """
13
+
13
14
  import subprocess
15
+ import sys
14
16
  from abc import ABC, abstractmethod
15
- from typing import List, Type
17
+ from argparse import Namespace
18
+ from typing import final, List, Dict, Tuple, Type
16
19
 
17
- from fprime_gds.plugin.definitions import gds_plugin_specification
20
+ from fprime_gds.plugin.definitions import gds_plugin_specification, gds_plugin
21
+ from fprime_gds.plugin.system import Plugins
22
+ from fprime_gds.executables.cli import (
23
+ CompositeParser,
24
+ ParserBase,
25
+ BareArgumentParser,
26
+ StandardPipelineParser,
27
+ PluginArgumentParser,
28
+ )
29
+ from fprime_gds.common.pipeline.standard import StandardPipeline
18
30
 
19
31
 
20
32
  class GdsBaseFunction(ABC):
21
- """ Base functionality for pluggable GDS start-up functions
33
+ """Base functionality for pluggable GDS start-up functions
22
34
 
23
35
  GDS start-up functionality is pluggable. This class acts as a base for pluggable functionality supplies helpers to
24
36
  the various start-up plugins.
@@ -29,7 +41,7 @@ class GdsBaseFunction(ABC):
29
41
 
30
42
  @abstractmethod
31
43
  def run(self):
32
- """ Run the start-up function
44
+ """Run the start-up function
33
45
 
34
46
  Run the start-up function unconstrained by the limitations of running in a dedicated subprocess.
35
47
 
@@ -38,7 +50,7 @@ class GdsBaseFunction(ABC):
38
50
 
39
51
 
40
52
  class GdsFunction(GdsBaseFunction, ABC):
41
- """ Functionality for pluggable GDS start-up functions
53
+ """Functionality for pluggable GDS start-up functions
42
54
 
43
55
  GDS start-up functionality is pluggable. This class acts as a wide-open implementation of functionality via a single
44
56
  `run` callback. Developers have complete control of the start-up functionality. However, this comes at the cost of
@@ -74,7 +86,7 @@ class GdsFunction(GdsBaseFunction, ABC):
74
86
 
75
87
 
76
88
  class GdsApp(GdsBaseFunction):
77
- """ GDS start-up process functionality
89
+ """GDS start-up process functionality
78
90
 
79
91
  A pluggable base class used to start a new process as part of the GDS command line invocation. This allows
80
92
  developers to add process-isolated functionality to the GDS network.
@@ -86,8 +98,9 @@ class GdsApp(GdsBaseFunction):
86
98
  Standard plug-in functions (get_name, get_arguments) are available should the implementer desire these features.
87
99
  Arguments will be supplied to the class's `__init__` function.
88
100
  """
101
+
89
102
  def __init__(self, **arguments):
90
- """ Construct the communication applications around the arguments
103
+ """Construct the communication applications around the arguments
91
104
 
92
105
  Command line arguments are passed in to match those returned from the `get_arguments` functions.
93
106
 
@@ -98,7 +111,7 @@ class GdsApp(GdsBaseFunction):
98
111
  self.arguments = arguments
99
112
 
100
113
  def run(self):
101
- """ Run the application as an isolated process
114
+ """Run the application as an isolated process
102
115
 
103
116
  GdsFunction objects require an implementation of the `run` command. This implementation will take the arguments
104
117
  provided from `get_process_invocation` function and supplies them as an invocation of the isolated subprocess.
@@ -107,7 +120,7 @@ class GdsApp(GdsBaseFunction):
107
120
  self.process = subprocess.Popen(invocation_arguments)
108
121
 
109
122
  def wait(self, timeout=None):
110
- """ Wait for the app to complete then return the return code
123
+ """Wait for the app to complete then return the return code
111
124
 
112
125
  Waits (blocking) for the process to complete. Then returns the return code of the underlying process. If timeout
113
126
  is non-None then the process will be killed after waiting for the timeout and another wait of timeout will be
@@ -125,7 +138,7 @@ class GdsApp(GdsBaseFunction):
125
138
 
126
139
  @abstractmethod
127
140
  def get_process_invocation(self) -> List[str]:
128
- """ Run the start-up function
141
+ """Run the start-up function
129
142
 
130
143
  Run the start-up function unconstrained by the limitations of running in a dedicated subprocess.
131
144
 
@@ -148,3 +161,163 @@ class GdsApp(GdsBaseFunction):
148
161
  GdsApp subclass
149
162
  """
150
163
  raise NotImplementedError()
164
+
165
+
166
class GdsStandardApp(GdsApp):
    """Standard GDS application that is built upon the StandardPipeline

    Use this class to help build a GdsApp plugin that has a known invocation and starts up the standard pipeline to
    enable standard GDS processes.

    Developers should implement a concrete subclass with the `start(pipeline)` function to run the application with the
    supplied pipeline. The subclass must supply **kwargs to the parent class constructor and extend a GdsApp plugin:

    ```
    @gds_plugin(GdsApp)
    class MyStandardApp(GdsStandardApp):
        def __init__(self, **kwargs):
            super().__init__(**kwargs)
        ...
    ```

    If the plugin requires more arguments beyond the standard pipeline arguments, supply those additional arguments via
    the `get_additional_arguments` method.
    """

    def __init__(self, **kwargs):
        """Take all arguments and store them"""
        super().__init__(**kwargs)

    @classmethod
    def get_additional_arguments(cls) -> Dict[Tuple, Dict[str, str]]:
        """Function to provide additional command line arguments beyond the standard pipeline

        Override this function to provide additional arguments. The form of the arguments are the same as returned by
        standard plugins: a dictionary of tuple flags to argparse kwargs inputs.

        Return:
            dictionary of flag tuple to argparse kwargs
        """
        return {}

    @classmethod
    def init(cls):
        """Allows standard application plugins to initialize before argument parsing is performed"""
        pass

    @final
    @classmethod
    def get_arguments(cls):
        """Get the arguments for this plugin

        This will return the combined arguments needed for the standard pipeline, and those returned from
        `get_additional_arguments()`.
        """
        return {
            **cls.get_additional_arguments(),
            **StandardPipelineParser().get_arguments(),
        }

    @classmethod
    def get_cli_parser(cls):
        """Helper to get a parser for this application's additional arguments"""
        return BareArgumentParser(
            cls.get_additional_arguments(), getattr(cls, "check_arguments", None)
        )

    @abstractmethod
    def start(self, pipeline: StandardPipeline):
        """Start function to contain behavior based in standard pipeline"""
        raise NotImplementedError()

    def get_process_invocation(self):
        """Return the process invocation for this class' main

        The process invocation of this application is to run cls.main and supply it a reproduced version of the
        arguments needed for the given parsers. When main is loaded, it will dispatch to the sub-classing plugin's
        start method. The subclassing plugin will already have had the arguments supplied via the PluginParser's
        construction of plugin objects.
        """
        # Renamed from `cls` to avoid shadowing the conventional classmethod name
        # with a plain string.
        class_name = self.__class__.__name__
        module = self.__class__.__module__

        # Rebuild the CLI arguments that reproduce this instance's configuration
        namespace = Namespace(**self.arguments)
        args = CompositeParser(
            [self.get_cli_parser(), StandardPipelineParser]
        ).reproduce_cli_args(namespace)
        # Launch a fresh interpreter that imports the plugin module and runs its main()
        return [sys.executable, "-c", f"import {module}\n{module}.{class_name}.main()"] + args

    @classmethod
    def main(cls):
        """Main function used as a generic entrypoint for GdsStandardApp derived GdsApp plugins"""
        try:
            cls.init()
            try:
                Plugins.system(
                    []
                )  # Disable plugin system unless specified through init
            # In the case where `init` sets up the plugin system, we want to pass the assertion
            # triggered by the code above that turns it off in the not-setup case.
            except AssertionError:
                pass
            parsed_arguments, _ = ParserBase.parse_args(
                [cls.get_cli_parser(), StandardPipelineParser, PluginArgumentParser],
                f"{cls.get_name()}: a standard app plugin",
            )
            pipeline = StandardPipeline()
            # Turn off history and filing
            pipeline.histories.implementation = None
            pipeline.filing = None
            pipeline = StandardPipelineParser.pipeline_factory(
                parsed_arguments, pipeline
            )
            application = cls(
                **cls.get_cli_parser().extract_arguments(parsed_arguments)
            )
            application.start(pipeline)
            sys.exit(0)
        except Exception as e:
            print(f"[ERROR] Error launching {cls.__name__}: {e}", file=sys.stderr)
            # Re-raise so the failure (and its traceback) propagates as a non-zero
            # exit. The unreachable `sys.exit(148)` that followed this bare raise
            # was dead code and has been removed.
            raise
283
+
284
+
285
@gds_plugin(GdsApp)
class CustomDataHandlers(GdsStandardApp):
    """Run an app that registers all custom data handlers

    A GdsApp plugin, built using the GdsStandardApp helper, that uses the provided standard pipeline to register each
    custom DataHandler plugin as a consumer of the appropriate type.
    """

    def __init__(self, **kwargs):
        """Required __init__ implementation"""
        super().__init__(**kwargs)

    @classmethod
    def init(cls):
        """Restrict the plugin system to data_handler plugins only"""
        Plugins.system(["data_handler"])

    def start(self, pipeline: StandardPipeline):
        """Register each data_handler plugin with the decoder producing its data"""
        # Map each handled packet descriptor onto the matching pipeline hook
        registration_by_descriptor = {
            "FW_PACKET_TELEM": pipeline.coders.register_channel_consumer,
            "FW_PACKET_LOG": pipeline.coders.register_event_consumer,
            "FW_PACKET_FILE": pipeline.coders.register_file_consumer,
            "FW_PACKET_PACKETIZED_TLM": pipeline.coders.register_packet_consumer,
        }

        for handler_class in Plugins.system().get_feature_classes("data_handler"):
            handler = handler_class()
            for descriptor in handler.get_handled_descriptors():
                register = registration_by_descriptor.get(descriptor)
                # Unknown descriptors are silently ignored (matches prior behavior)
                if register is not None:
                    register(handler)

    @classmethod
    def get_name(cls):
        """Return the name of this application"""
        return "custom-data-handlers-app"