fprime-gds 4.0.1__py3-none-any.whl → 4.0.2a2__py3-none-any.whl
- fprime_gds/common/decoders/ch_decoder.py +1 -1
- fprime_gds/common/decoders/event_decoder.py +1 -1
- fprime_gds/common/decoders/pkt_decoder.py +1 -1
- fprime_gds/common/distributor/distributor.py +1 -1
- fprime_gds/common/encoders/cmd_encoder.py +1 -1
- fprime_gds/common/encoders/encoder.py +1 -1
- fprime_gds/common/encoders/seq_writer.py +1 -1
- fprime_gds/common/gds_cli/base_commands.py +1 -1
- fprime_gds/common/gds_cli/channels.py +1 -1
- fprime_gds/common/gds_cli/command_send.py +1 -1
- fprime_gds/common/gds_cli/events.py +1 -1
- fprime_gds/common/handlers.py +56 -0
- fprime_gds/common/{pipeline → models}/dictionaries.py +1 -0
- fprime_gds/common/pipeline/publishing.py +111 -0
- fprime_gds/common/pipeline/standard.py +9 -22
- fprime_gds/common/testing_fw/api.py +86 -23
- fprime_gds/common/utils/config_manager.py +1 -1
- fprime_gds/executables/apps.py +59 -14
- fprime_gds/executables/cli.py +34 -5
- fprime_gds/executables/dictionary_merge.py +206 -0
- fprime_gds/executables/fprime_cli.py +1 -1
- fprime_gds/executables/run_deployment.py +19 -6
- {fprime_gds-4.0.1.dist-info → fprime_gds-4.0.2a2.dist-info}/METADATA +1 -1
- {fprime_gds-4.0.1.dist-info → fprime_gds-4.0.2a2.dist-info}/RECORD +29 -29
- {fprime_gds-4.0.1.dist-info → fprime_gds-4.0.2a2.dist-info}/entry_points.txt +1 -0
- fprime_gds/common/models/common/channel_telemetry.py +0 -174
- fprime_gds/common/models/common/event.py +0 -121
- {fprime_gds-4.0.1.dist-info → fprime_gds-4.0.2a2.dist-info}/WHEEL +0 -0
- {fprime_gds-4.0.1.dist-info → fprime_gds-4.0.2a2.dist-info}/licenses/LICENSE.txt +0 -0
- {fprime_gds-4.0.1.dist-info → fprime_gds-4.0.2a2.dist-info}/licenses/NOTICE.txt +0 -0
- {fprime_gds-4.0.1.dist-info → fprime_gds-4.0.2a2.dist-info}/top_level.txt +0 -0
fprime_gds/executables/apps.py
CHANGED
@@ -14,8 +14,8 @@ command line that will be spun into its own process.
 import subprocess
 import sys
 from abc import ABC, abstractmethod
-
-from typing import final, List, Dict, Tuple, Type
+import argparse
+from typing import final, List, Dict, Tuple, Type, Optional
 
 from fprime_gds.plugin.definitions import gds_plugin_specification, gds_plugin
 from fprime_gds.plugin.system import Plugins
@@ -23,10 +23,13 @@ from fprime_gds.executables.cli import (
     CompositeParser,
     ParserBase,
     BareArgumentParser,
+    MiddleWareParser,
+    DictionaryParser,
     StandardPipelineParser,
     PluginArgumentParser,
 )
 from fprime_gds.common.pipeline.standard import StandardPipeline
+from fprime_gds.common.pipeline.publishing import PublishingPipeline
 
 
 class GdsBaseFunction(ABC):
@@ -40,7 +43,7 @@ class GdsBaseFunction(ABC):
     """
 
     @abstractmethod
-    def run(self):
+    def run(self, parsed_args):
         """Run the start-up function
 
         Run the start-up function unconstrained by the limitations of running in a dedicated subprocess.
@@ -110,13 +113,13 @@ class GdsApp(GdsBaseFunction):
         self.process = None
         self.arguments = arguments
 
-    def run(self):
+    def run(self, parsed_args):
         """Run the application as an isolated process
 
         GdsFunction objects require an implementation of the `run` command. This implementation will take the arguments
         provided from `get_process_invocation` function and supplies them as an invocation of the isolated subprocess.
         """
-        invocation_arguments = self.get_process_invocation()
+        invocation_arguments = self.get_process_invocation(parsed_args)
         self.process = subprocess.Popen(invocation_arguments)
 
     def wait(self, timeout=None):
@@ -137,7 +140,9 @@ class GdsApp(GdsBaseFunction):
         return self.process.returncode
 
     @abstractmethod
-    def get_process_invocation(self) -> List[str]:
+    def get_process_invocation(
+        self, namespace: Optional[argparse.Namespace] = None
+    ) -> List[str]:
         """Run the start-up function
 
         Run the start-up function unconstrained by the limitations of running in a dedicated subprocess.
@@ -199,6 +204,18 @@ class GdsStandardApp(GdsApp):
             dictionary of flag tuple to argparse kwargs
         """
         return {}
+
+    @classmethod
+    def get_additional_cli_parsers(cls) -> List[ParserBase]:
+        """ Supply a list of CLI parser objects
+
+        Supply a list of CLI parser objects to the CLI system. This allows use of full ParserBase objects instead of
+        the more restrictive dictionary approach seen in get_additional_arguments.
+
+        Returns:
+            list of parser objects as passed to ParserBase
+        """
+        return []
 
     @classmethod
     def init(cls):
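Note: the new get_additional_cli_parsers() hook lets a plugin app pull in full ParserBase objects. A minimal sketch of a subclass using it; the class name and start() body are hypothetical, while the parser composition mirrors CustomDataHandlers below:

    from fprime_gds.executables.apps import GdsStandardApp
    from fprime_gds.executables.cli import CompositeParser, DictionaryParser, MiddleWareParser

    class ExampleForwarder(GdsStandardApp):  # hypothetical plugin app
        def __init__(self, namespace, **kwargs):
            super().__init__(**kwargs)
            self.namespace = namespace  # parsed CLI arguments, passed in by main() below

        @classmethod
        def get_additional_cli_parsers(cls):
            # Full parser objects instead of the flag->kwargs dict of get_additional_arguments()
            return [CompositeParser([MiddleWareParser, DictionaryParser])]

        def start(self, pipeline):
            pass  # placeholder: consume the configured StandardPipeline here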
@@ -230,7 +247,7 @@ class GdsStandardApp(GdsApp):
         """Start function to contain behavior based in standard pipeline"""
         raise NotImplementedError()
 
-    def get_process_invocation(self):
+    def get_process_invocation(self, namespace=None):
         """Return the process invocation for this class' main
 
         The process invocation of this application is to run cls.main and supply it a reproduced version of the
@@ -247,7 +264,8 @@ class GdsStandardApp(GdsApp):
         composite_parser = CompositeParser(
             [self.get_cli_parser(), StandardPipelineParser]
         )
-        namespace, _, _ = ParserBase.parse_known_args([composite_parser], client=True)
+        if namespace is None:
+            namespace, _, _ = ParserBase.parse_known_args([composite_parser], client=True)
         args = composite_parser.reproduce_cli_args(namespace)
         return [sys.executable, "-c", f"import {module}\n{module}.{cls}.main()"] + args
 
@@ -261,32 +279,39 @@ class GdsStandardApp(GdsApp):
                     []
                 )  # Disable plugin system unless specified through init
                 # In the case where `init` sets up the plugin system, we want to pass the assertion
-                # triggered by the code above that turns it off in the not-setup case.
+                # triggered by the code above that turns it off in the not-setup case.
             except AssertionError:
                 pass
             plugin_name = getattr(cls, "get_name", lambda: cls.__name__)()
+            plugin_composite = CompositeParser([cls.get_cli_parser()] + cls.get_additional_cli_parsers())
+
             parsed_arguments, _ = ParserBase.parse_args(
-                [
+                [ StandardPipelineParser, PluginArgumentParser, plugin_composite],
                 f"{plugin_name}: a standard app plugin",
+                client=True,
             )
             pipeline = StandardPipeline()
-            # Turn off history and
+            # Turn off history, file handling, and logging
             pipeline.histories.implementation = None
             pipeline.filing = None
+            parsed_arguments.disable_data_logging = True
             pipeline = StandardPipelineParser.pipeline_factory(
                 parsed_arguments, pipeline
             )
             application = cls(
-                **cls.get_cli_parser().extract_arguments(parsed_arguments)
+                **cls.get_cli_parser().extract_arguments(parsed_arguments),
+                namespace=parsed_arguments,
+
             )
             application.start(pipeline)
             sys.exit(0)
         except Exception as e:
             print(f"[ERROR] Error launching {cls.__name__}: {e}", file=sys.stderr)
-            raise
             sys.exit(148)
 
 
+
+
 @gds_plugin(GdsApp)
 class CustomDataHandlers(GdsStandardApp):
     """Run an app that registers all custom data handlers
@@ -294,10 +319,25 @@ class CustomDataHandlers(GdsStandardApp):
     A GdsApp plugin, built using the GdsStandardApp helper, that uses the provided standard pipeline to register each
     custom DataHandler plugin as a consumer of the appropriate type.
     """
+    PLUGIN_PARSER = CompositeParser([MiddleWareParser, DictionaryParser])
 
-    def __init__(self, **kwargs):
+    def __init__(self, namespace, **kwargs):
         """Required __init__ implementation"""
         super().__init__(**kwargs)
+        self.connection_transport = namespace.connection_transport
+        self.connection_uri = namespace.connection_uri
+        self.dictionaries = namespace.dictionaries
+
+    @classmethod
+    def get_additional_arguments(cls):
+        """ Supplies additional arguments needed """
+        return {}
+
+    @classmethod
+    def get_additional_cli_parsers(cls):
+        """ Requires MiddleWareParser and Dictionary Parser"""
+        return [cls.PLUGIN_PARSER]
+
 
     @classmethod
     def init(cls):
@@ -312,10 +352,15 @@ class CustomDataHandlers(GdsStandardApp):
             "FW_PACKET_FILE": pipeline.coders.register_file_consumer,
             "FW_PACKET_PACKETIZED_TLM": pipeline.coders.register_packet_consumer,
         }
+        self.publisher = PublishingPipeline()
+        self.publisher.transport_implementation = self.connection_transport
+        self.publisher.setup(self.dictionaries)
+        self.publisher.connect(self.connection_uri)
 
         data_handlers = Plugins.system().get_feature_classes("data_handler")
         for data_handler_class in data_handlers:
             data_handler = data_handler_class()
+            data_handler.set_publisher(self.publisher)
             descriptors = data_handler.get_handled_descriptors()
             for descriptor in descriptors:
                 DESCRIPTOR_TO_FUNCTION.get(descriptor, lambda discard: discard)(
fprime_gds/executables/cli.py
CHANGED
@@ -27,9 +27,9 @@ from pathlib import Path
 from typing import Any, Dict, List, Tuple
 
 # Required to set the checksum as a module variable
-import fprime_gds.common.communication.checksum
 import fprime_gds.common.logger
 from fprime_gds.common.communication.adapters.ip import check_port
+from fprime_gds.common.models.dictionaries import Dictionaries
 from fprime_gds.common.pipeline.standard import StandardPipeline
 from fprime_gds.common.transport import ThreadedTCPSocketClient
 from fprime_gds.common.utils.config_manager import ConfigManager
@@ -176,6 +176,18 @@ class ParserBase(ABC):
         ]
         return list(itertools.chain.from_iterable(cli_pairs))
 
+    def handle_values(self, values: Dict[str, Any]):
+        """Post-process the parser's arguments in dictionary form
+
+        Handle arguments from the given parser in dictionary form. This will convert to/from the namespace and then
+        delegate to handle_arguments.
+
+        Args:
+            args: arguments namespace of processed arguments
+        Returns: dictionary with processed results of arguments.
+        """
+        return vars(self.handle_arguments(args=argparse.Namespace(**values), kwargs={}))
+
     @abstractmethod
     def handle_arguments(self, args, **kwargs):
         """Post-process the parser's arguments
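Note: handle_values() is a thin adapter around handle_arguments(). A standalone sketch of the round-trip it performs; handle_arguments here is a stand-in function, not the real abstract method:

    import argparse

    def handle_arguments(args, **kwargs):
        # stand-in for a concrete ParserBase.handle_arguments override
        args.word = args.word.upper()
        return args

    values = {"word": "fprime"}
    # dict -> Namespace, post-process, Namespace -> dict: what handle_values does
    result = vars(handle_arguments(args=argparse.Namespace(**values), kwargs={}))
    assert result == {"word": "FPRIME"}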
@@ -851,6 +863,11 @@ class LogDeployParser(ParserBase):
                 "default": "INFO",
                 "help": "Set the logging level of GDS processes [default: %(default)s]",
             },
+            ("--disable-data-logging",): {
+                "action": "store_true",
+                "default": False,
+                "help": "Disable logging of each data item",
+            },
         }
 
     def handle_arguments(self, args, **kwargs):
@@ -1002,6 +1019,19 @@ class DictionaryParser(DetectionParser):
         elif args.dictionary is None:
             args = super().handle_arguments(args, **kwargs)
             args.dictionary = find_dict(args.deployment)
+
+        # Setup dictionaries encoders and decoders
+        dictionaries = Dictionaries()
+
+        dictionaries.load_dictionaries(
+            args.dictionary, args.packet_spec, args.packet_set_name
+        )
+        config = ConfigManager.get_instance()
+        # Update config to use Fw types defined in the JSON dictionary
+        if dictionaries.fw_type_name:
+            for fw_type_name, fw_type in dictionaries.fw_type_name.items():
+                config.set("types", fw_type_name, fw_type)
+        args.dictionaries = dictionaries
         return args
 
 
@@ -1065,12 +1095,11 @@ class StandardPipelineParser(CompositeParser):
     def pipeline_factory(args_ns, pipeline=None) -> StandardPipeline:
         """A factory of the standard pipeline given the handled arguments"""
         pipeline_arguments = {
-            "config": ConfigManager(),
-            "dictionary": args_ns.dictionary,
+            "config": ConfigManager.get_instance(),
+            "dictionaries": args_ns.dictionaries,
             "file_store": args_ns.files_storage_directory,
-            "packet_spec": args_ns.packet_spec,
-            "packet_set_name": args_ns.packet_set_name,
             "logging_prefix": args_ns.logs,
+            "data_logging_enabled": not args_ns.disable_data_logging
         }
         pipeline = pipeline if pipeline else StandardPipeline()
         pipeline.transport_implementation = args_ns.connection_transport
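Note: with DictionaryParser now loading dictionaries during argument handling, the factory consumes the preloaded Dictionaries object from the namespace. A minimal sketch of driving it (the description string is arbitrary; standard GDS flags such as the dictionary path are expected on the command line):

    from fprime_gds.executables.cli import ParserBase, StandardPipelineParser

    # handle_arguments populates args.dictionaries on the way through
    args, _ = ParserBase.parse_args([StandardPipelineParser], "example app", client=True)
    pipeline = StandardPipelineParser.pipeline_factory(args)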
fprime_gds/executables/dictionary_merge.py
ADDED
@@ -0,0 +1,206 @@
+""" fprime_gds.executables.dictionary_merge: script to merge two F Prime dictionaries """
+
+import argparse
+import functools
+import json
+import re
+import sys
+from pathlib import Path
+
+
+def validate_metadata(metadata1, metadata2):
+    """ Check consistency between metadata blocks
+
+    The JSON dictionary has multiple fields in the metadata block. This function will check that there is consistency
+    between these two blocks.
+
+    Args:
+        metadata1: metadata from the first dictionary
+        metadata2: metadata from the second dictionary
+    """
+    for field in ["projectVersion", "frameworkVersion", "dictionarySpecVersion"]:
+        value1 = metadata1[field]
+        value2 = metadata2[field]
+        if value1 != value2:
+            raise ValueError(f"Inconsistent metadata values for field '{field}'. ({value1} vs {value2})")
+
+def validate_non_unique(non_unique1, non_unique2):
+    """ Validate non-unique definitions are consistent between dictionaries """
+    indexed_non_unique1 = {value.get("qualifiedName"): value for value in non_unique1}
+
+    for value2 in non_unique2:
+        value1 = indexed_non_unique1.get(value2["qualifiedName"], None)
+        if value1 is not None and value1 != value2:
+            raise ValueError(f"'{value2['qualifiedName']}' has inconsistent definitions")
+
+def validate_unique(unique1, unique2):
+    """ Validate unique definitions have no duplication """
+    ids = {item.get("id", item.get("opcode", "")) for item in unique1}
+    names = {item.get("name") for item in unique1}
+
+
+    for value2 in unique2:
+        name = value2['name']
+        id = value2.get("id", value2.get("opcode", ""))
+        if name in names:
+            raise ValueError(f"'{name}' appears in both dictionaries")
+        if id and id in ids:
+            raise ValueError(f"ID/Opcode {id} used in both dictionaries")
+
+
+def merge_metadata(meta1, meta2, name=None, permissive=False):
+    """ Merge JSON dictionary metadata blocks
+
+    The JSON dictionary starts with a metadata block. This function will merge the two metadata blocks preferring the
+    first when there is a discrepancy. 'name' will be supplied as the new name defaulting to "name1_name2_merged" when
+    not supplied. If 'permissive' is true, version discrepancies will be ignored otherwise this will throw a ValueError
+    if the versions do not match.
+
+    Args:
+        meta1: first metadata block
+        meta2: second metadata block
+        name: (optional) name for the new dictionary (Default: meta1.name_meta2.name_merged)
+        permissive: (optional) True to allow version miss-matching. (Default: False)
+    Return:
+        merged metadata block
+    Throws:
+        ValueError on version miss-match without the permissive flag
+    """
+    if not permissive:
+        validate_metadata(meta1, meta2)
+    if name is None:
+        name = f"{meta1.get('deploymentName', 'unknown')}_{meta2.get('deploymentName', 'unknown')}_merged"
+    return {
+        **meta1,
+        **{
+            "deploymentName": name
+        }
+    }
+
+def merge_lists(list1, list2, validator):
+    """ Merge list-like entities
+
+    This will merge two list-like entities using the supplied validator.
+
+    Args:
+        list1: first list-like
+        list2: second list-like
+        validator: validate the lists are consistent or non-colliding
+
+    """
+    validator(list1, list2)
+    singular = {item.get("qualifiedName", item.get("name", "")): item for item in list1 + list2}
+    return list(singular.values())
+
+def merge_non_unique(non_unique1, non_unique2):
+    """ Merge the non-unique blocks in JSON dictionaries
+
+    JSON dictionaries have some non-unique definitions (e.g. "typeDefinitions") that must be merged ensuring
+    consistency but ignoring duplication. This function will create a superset of the two blocks. Inconsistent
+    definitions will result in a ValueError.
+
+    Args:
+        non_unique1: first non unique block
+        non_unique2: second non unique block
+    """
+    return merge_lists(non_unique1, non_unique2, validate_non_unique)
+
+
+def merge_unique(unique1, unique2):
+    """ Merge the unique blocks in JSON dictionaries
+
+    JSON dictionaries have some unique definitions (e.g. "eventDefinitions") that must be merged ensuring that entries
+    are not duplicated between the sets. This function will create a superset of the two blocks. Duplicated definitions
+    will result in a ValueError.
+
+    Args:
+        unique1: first unique block
+        unique2: second unique block
+    """
+    return merge_lists(unique1, unique2, validate_unique)
+
+
+def merge_dictionaries(dictionary1, dictionary2, name=None, permissive=False):
+    """ Merge two dictionaries
+
+    This will merge two JSON dictionaries' major top-level sections. Unknown fields will be preserved preferring
+    dictionary1's content for unknown fields.
+
+    Args:
+        dictionary1: dictionary 1's content
+        dictionary2: dictionary 2's content
+        name: new 'deploymentName' field
+        permissive: allow miss-matched dictionary versions
+
+    Return: merged dictionaries
+
+    """
+    merge_metadata_fn = functools.partial(merge_metadata, name=name, permissive=permissive)
+
+    stages = [
+        ("metadata", merge_metadata_fn),
+        ("typeDefinitions", merge_non_unique),
+        ("constants", merge_non_unique),
+        ("commands", merge_unique),
+        ("parameters", merge_unique),
+        ("events", merge_unique),
+        ("telemetryChannels", merge_unique),
+        ("records", merge_unique),
+        ("containers", merge_unique),
+        ("telemetryPacketSets", merge_unique),
+    ]
+
+    merged = {**dictionary2, **dictionary1}
+    for field, merger in stages:
+        object1 = dictionary1[field]
+        object2 = dictionary2[field]
+        try:
+            merged[field] = merger(object1, object2)
+        except ValueError as value_error:
+            raise ValueError(f"Merging '{field}' failed. {value_error}")
+        except KeyError as key_error:
+            raise ValueError(f"Malformed dictionary section '{field}'. Missing key: {key_error}")
+    return merged
+
+def parse_arguments():
+    """ Parse arguments for this script """
+    parser = argparse.ArgumentParser(description="Merge two dictionaries")
+    parser.add_argument("--name", type=str, default=None, help="Name to use as the new 'deploymentName' field")
+    parser.add_argument("--output", type=Path, default=Path("MergedAppDictionary.json"),
+                        help="Output dictionary path. Default: MergedAppDictionary.json")
+    parser.add_argument("--permissive", action="store_true", default=False,
+                        help="Ignore discrepancies between dictionaries")
+    parser.add_argument("dictionary1", type=Path, help="Primary dictionary to merge")
+    parser.add_argument("dictionary2", type=Path, help="Secondary dictionary to merge")
+
+    args = parser.parse_args()
+
+    # Validate arguments
+    if args.name is not None and not re.match("[a-zA-Z_][a-zA-Z_0-9]*", args.name):
+        raise ValueError(f"--name '{args.name}' is an invalid identifier")
+    if not args.dictionary1.exists():
+        raise ValueError(f"'{args.dictionary1}' does not exist")
+    if not args.dictionary2.exists():
+        raise ValueError(f"'{args.dictionary2}' does not exist")
+    return args
+
+def main():
+    """ Main entry point """
+    try:
+        args = parse_arguments()
+        # Open dictionaries
+        with open(args.dictionary1, "r") as dictionary1_fh:
+            dictionary1 = json.load(dictionary1_fh)
+        with open(args.dictionary2, "r") as dictionary2_fh:
+            dictionary2 = json.load(dictionary2_fh)
+        output = merge_dictionaries(dictionary1, dictionary2, args.name, args.permissive)
+        with open(args.output, "w") as output_fh:
+            json.dump(output, output_fh, indent=2)
+    except Exception as exception:
+        print(f"[ERROR] {exception}", file=sys.stderr)
+        sys.exit(1)
+    sys.exit(0)
+
+if __name__ == "__main__":
+    main()
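Note: a usage sketch of merge_dictionaries() with two minimal, illustrative dictionary structures; real JSON dictionaries carry many more fields, but these cover every section the merger touches:

    from fprime_gds.executables.dictionary_merge import merge_dictionaries

    base = {
        "metadata": {"deploymentName": "Ref", "projectVersion": "1.0",
                     "frameworkVersion": "4.0.0", "dictionarySpecVersion": "1.0"},
        "typeDefinitions": [], "constants": [], "parameters": [], "events": [],
        "telemetryChannels": [], "records": [], "containers": [], "telemetryPacketSets": [],
        "commands": [{"name": "Ref.cmdDisp.CMD_NO_OP", "opcode": 1280}],
    }
    other = {**base,
             "metadata": {**base["metadata"], "deploymentName": "Comm"},
             "commands": [{"name": "Comm.cmdDisp.CMD_NO_OP", "opcode": 2280}]}

    merged = merge_dictionaries(base, other, name="RefComm")
    assert merged["metadata"]["deploymentName"] == "RefComm"
    assert len(merged["commands"]) == 2  # unique sections become a superset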
fprime_gds/executables/fprime_cli.py
CHANGED
@@ -227,7 +227,7 @@ class CommandSubparserInjector(CliSubparserInjectorBase):
             argcomplete.warn("No dictionary found to get command names from")
             return []
 
-        from fprime_gds.common.pipeline.dictionaries import Dictionaries
+        from fprime_gds.common.models.dictionaries import Dictionaries
 
         dictionary = Dictionaries()
         dictionary.load_dictionaries(dict_path, None, None)
fprime_gds/executables/run_deployment.py
CHANGED
@@ -5,6 +5,7 @@
 ####
 import os
 import sys
+import copy
 import webbrowser
 
 from fprime_gds.executables.cli import (
@@ -38,7 +39,9 @@ def parse_args():
         PluginArgumentParser,
     ]
     # Parse the arguments, and refine through all handlers
-    args, parser = ConfigDrivenParser.parse_args(arg_handlers, "Run F prime deployment and GDS")
+    args, parser = ConfigDrivenParser.parse_args(
+        arg_handlers, "Run F prime deployment and GDS"
+    )
    return args
 
 
@@ -175,11 +178,21 @@ def launch_comm(parsed_args):
     )
 
 
-def launch_plugin(plugin_class_instance):
+def launch_plugin(parsed_args, plugin_class_instance):
     """Launch a plugin instance"""
-    plugin_name = getattr(plugin_class_instance, "get_name", lambda: plugin_class_instance.__class__.__name__)()
+    plugin_name = getattr(
+        plugin_class_instance,
+        "get_name",
+        lambda: plugin_class_instance.__class__.__name__,
+    )()
+    plugin_args = copy.deepcopy(parsed_args)
+    # Set logging to use a subdirectory within the root logs directory
+    plugin_logs = os.path.join(plugin_args.logs, plugin_name)
+    os.mkdir(plugin_logs)
+    plugin_args.logs = plugin_logs
+    plugin_args.log_directly = True
     return launch_process(
-        plugin_class_instance.get_process_invocation(),
+        plugin_class_instance.get_process_invocation(plugin_args),
         name=f"{ plugin_name } Plugin App",
         launch_time=1,
     )
@@ -218,11 +231,11 @@ def main():
     try:
         procs = [launcher(parsed_args) for launcher in launchers]
         _ = [
-            launch_plugin(cls())
+            launch_plugin(parsed_args, cls(namespace=parsed_args))
             for cls in Plugins.system().get_feature_classes("gds_app")
         ]
         _ = [
-            instance().run()
+            instance().run(parsed_args)
             for instance in Plugins.system().get_feature_classes("gds_function")
         ]
 
{fprime_gds-4.0.1.dist-info → fprime_gds-4.0.2a2.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fprime-gds
-Version: 4.0.1
+Version: 4.0.2a2
 Summary: F Prime Flight Software Ground Data System layer
 Author-email: Michael Starch <Michael.D.Starch@jpl.nasa.gov>, Thomas Boyer-Chammard <Thomas.Boyer.Chammard@jpl.nasa.gov>
 License:
|