fprime-gds 4.0.0a10__py3-none-any.whl → 4.0.2a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
--- fprime_gds/common/distributor/distributor.py
+++ fprime_gds/common/distributor/distributor.py
@@ -13,6 +13,7 @@ descriptor header will be passed on to the registered objects.
 
 @bug No known bugs
 """
+
 import logging
 
 from fprime_gds.common.decoders.decoder import DecodingException
@@ -193,18 +194,23 @@ class Distributor(DataHandler):
         self.__buf.extend(data)
 
         (leftover_data, raw_msgs) = self.parse_into_raw_msgs_api(self.__buf)
-        assert leftover_data == self.__buf, "Leftover data is not equivalent to the remaining data in buffer"
+        assert (
+            leftover_data == self.__buf
+        ), "Leftover data is not equivalent to the remaining data in buffer"
 
         for raw_msg in raw_msgs:
             (length, data_desc, msg) = self.parse_raw_msg_api(raw_msg)
 
             data_desc_key = data_desc_type.DataDescType(data_desc).name
-            for d in self.__decoders[data_desc_key]:
+            decoders = self.__decoders[data_desc_key]
+
+            if not decoders:
+                LOGGER.warning(f"No decoder registered for: {data_desc_key}")
+
+            for d in decoders:
                 try:
                     d.data_callback(msg)
                 except DecodingException as dexc:
-                    LOGGER.warning("Decoding error: %s", dexc)
+                    LOGGER.warning(f"Decoding error: {dexc}")
                 except Exception as exc:
-                    LOGGER.warning("Parsing error: %s", exc)
-            else:
-                LOGGER.warning("No decoder registered for: %s", data_desc_type.DataDescType(data_desc).name)
+                    LOGGER.warning(f"Parsing error: {exc}")
--- fprime_gds/common/pipeline/standard.py
+++ fprime_gds/common/pipeline/standard.py
@@ -63,6 +63,7 @@ class StandardPipeline:
         logging_prefix=None,
         packet_spec=None,
         packet_set_name=None,
+        data_logging_enabled=True
     ):
         """
         Setup the standard pipeline for moving data from the middleware layer through the GDS layers using the standard
@@ -113,7 +114,7 @@
         # Register distributor to client socket
         self.client_socket.register(self.distributor)
         # Final setup step is to make a logging directory, and register in the logger
-        if logging_prefix:
+        if logging_prefix and data_logging_enabled:
             self.setup_logging(logging_prefix)
 
     @property
--- fprime_gds/common/testing_fw/api.py
+++ fprime_gds/common/testing_fw/api.py
@@ -7,6 +7,7 @@ telemetry and dictionaries.
 
 :author: koran
 """
+
 import signal
 import time
 from pathlib import Path
@@ -71,11 +72,12 @@ class IntegrationTestAPI(DataHandler):
         # Copy dictionaries and binary file to output directory
         if logpath is not None:
             base_dir = Path(self.pipeline.dictionary_path).parents[1]
-            for subdir in ['bin', 'dict']:
+            for subdir in ["bin", "dict"]:
                 dir_path = base_dir / subdir
                 if dir_path.is_dir():
-                    shutil.copytree(dir_path, Path(logpath) / subdir,
-                                    dirs_exist_ok=True)
+                    shutil.copytree(
+                        dir_path, Path(logpath) / subdir, dirs_exist_ok=True
+                    )
 
         # A predicate used as a filter to choose which events to log automatically
         self.event_log_filter = self.get_event_pred()
@@ -84,7 +86,7 @@ class IntegrationTestAPI(DataHandler):
         self.last_evr = None
 
     def setup(self):
-        """ Set up the API, assumes pipeline is now setup """
+        """Set up the API, assumes pipeline is now setup"""
         self.pipeline.coders.register_event_consumer(self)
 
     def teardown(self):
@@ -162,10 +164,10 @@ class IntegrationTestAPI(DataHandler):
         fail_color = TestLogger.ORANGE
 
         if value:
-            ast_msg = f'{ast_msg} succeeded: {msg}'
+            ast_msg = f"{ast_msg} succeeded: {msg}"
             self.__log(ast_msg, TestLogger.GREEN)
         else:
-            ast_msg = f'{ast_msg} failed: {msg}'
+            ast_msg = f"{ast_msg} failed: {msg}"
             self.__log(ast_msg, fail_color)
 
         if not expect:
@@ -209,7 +211,7 @@ class IntegrationTestAPI(DataHandler):
         self.event_history.clear()
         self.telemetry_history.clear()
         msg = "Clearing Test Histories"
-
+
         self.__log(msg, TestLogger.WHITE)
         self.command_history.clear()
 
@@ -239,9 +241,9 @@ class IntegrationTestAPI(DataHandler):
         dictionary = str(self.pipeline.dictionary_path)
 
         try:
-            with open(dictionary, 'r') as file:
+            with open(dictionary, "r") as file:
                 data = json.load(file)
-                return data['metadata'].get("deploymentName")
+                return data["metadata"].get("deploymentName")
         except FileNotFoundError:
             msg = f"Error: File not found at path: {dictionary}"
             self.__log(msg, TestLogger.YELLOW)
@@ -273,7 +275,7 @@ class IntegrationTestAPI(DataHandler):
             count, channels=channels, history=history, start=start, timeout=timeout
         )
         if not result:
-            msg = f'Failed to detect any data flow for {timeout} s.'
+            msg = f"Failed to detect any data flow for {timeout} s."
             self.__log(msg, TestLogger.RED)
             assert False, msg
         self.remove_telemetry_subhistory(history)
@@ -300,7 +302,7 @@ class IntegrationTestAPI(DataHandler):
         config_file = self.get_config_file_path()
 
         try:
-            with open(config_file, 'r') as file:
+            with open(config_file, "r") as file:
                 result = json.load(file)
                 return result
         except FileNotFoundError:
@@ -351,7 +353,7 @@ class IntegrationTestAPI(DataHandler):
         if data:
             try:
                 filepath = data["Svc.PrmDb.filename"]
-                if filepath.startswith('/'):
+                if filepath.startswith("/"):
                     return filepath
                 else:
                     msg = f"Error: {filepath} did not start with a forward slash"
@@ -549,7 +551,15 @@ class IntegrationTestAPI(DataHandler):
             return self.await_event_sequence(events, start=start, timeout=timeout)
         return self.await_event(events, start=start, timeout=timeout)
 
-    def send_and_assert_command(self, command, args=[], max_delay=None, timeout=5, events=None, commander="cmdDisp"):
+    def send_and_assert_command(
+        self,
+        command,
+        args=[],
+        max_delay=None,
+        timeout=5,
+        events=None,
+        commander="cmdDisp",
+    ):
         """
         This helper will send a command and verify that the command was dispatched and completed
         within the F' deployment. This helper can retroactively check that the delay between
@@ -566,7 +576,9 @@ class IntegrationTestAPI(DataHandler):
             returns a list of the EventData objects found by the search
         """
         cmd_id = self.translate_command_name(command)
-        dispatch = [self.get_event_pred(f"{commander}.OpCodeDispatched", [cmd_id, None])]
+        dispatch = [
+            self.get_event_pred(f"{commander}.OpCodeDispatched", [cmd_id, None])
+        ]
         complete = [self.get_event_pred(f"{commander}.OpCodeCompleted", [cmd_id])]
         events = dispatch + (events if events else []) + complete
         results = self.send_and_assert_event(command, args, events, timeout=timeout)
@@ -576,7 +588,6 @@ class IntegrationTestAPI(DataHandler):
             assert delay < max_delay, msg
         return results
 
-
    ######################################################################################
    # Command Asserts
    ######################################################################################
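The reflowed send_and_assert_command signature above keeps its behavior: it sends the command, then asserts a {commander}.OpCodeDispatched event followed by {commander}.OpCodeCompleted within the timeout. A minimal usage sketch follows; the fprime_test_api pytest fixture name and the command mnemonic are illustrative assumptions, not part of this diff.

```python
# Hedged sketch: calling send_and_assert_command from a pytest-style
# integration test. Fixture name and command mnemonic are placeholders;
# the keyword arguments match the signature shown in the hunk above.
def test_no_op_dispatch(fprime_test_api):
    fprime_test_api.send_and_assert_command(
        "cmdDisp.CMD_NO_OP",   # fully qualified command mnemonic (placeholder)
        max_delay=1.0,         # optional bound on dispatch-to-completion delay
        timeout=5,
        commander="cmdDisp",   # component emitting OpCodeDispatched/Completed
    )
```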
@@ -652,14 +663,18 @@ class IntegrationTestAPI(DataHandler):
         """
         if isinstance(channel, str):
             ch_dict = self.pipeline.dictionaries.channel_name
-            matching = [ch_dict[name].get_id() for name in ch_dict.keys() if name.endswith(f".{channel}")]
+            matching = [
+                ch_dict[name].get_id()
+                for name in ch_dict.keys()
+                if name.endswith(f".{channel}")
+            ]
             if channel in ch_dict:
                 return ch_dict[channel].get_id()
             if force_component or not matching:
                 msg = f"The telemetry mnemonic, {channel}, wasn't in the dictionary"
                 raise KeyError(msg)
             return matching
-
+
         ch_dict = self.pipeline.dictionaries.channel_id
         if channel in ch_dict:
             return channel
@@ -689,7 +704,11 @@ class IntegrationTestAPI(DataHandler):
 
         if not predicates.is_predicate(channel) and channel is not None:
             channel = self.translate_telemetry_name(channel, force_component=False)
-            channel = predicates.is_a_member_of(channel) if isinstance(channel, list) else predicates.equal_to(channel)
+            channel = (
+                predicates.is_a_member_of(channel)
+                if isinstance(channel, list)
+                else predicates.equal_to(channel)
+            )
 
         if not predicates.is_predicate(value) and value is not None:
             value = predicates.equal_to(value)
@@ -873,7 +892,11 @@ class IntegrationTestAPI(DataHandler):
         """
         if isinstance(event, str):
             event_dict = self.pipeline.dictionaries.event_name
-            matching = [event_dict[name].get_id() for name in event_dict.keys() if name.endswith(f".{event}")]
+            matching = [
+                event_dict[name].get_id()
+                for name in event_dict.keys()
+                if name.endswith(f".{event}")
+            ]
             if event in event_dict:
                 return event_dict[event].get_id()
             if force_component or not matching:
@@ -910,7 +933,11 @@ class IntegrationTestAPI(DataHandler):
 
         if not predicates.is_predicate(event) and event is not None:
             event = self.translate_event_name(event, force_component=False)
-            event = predicates.is_a_member_of(event) if isinstance(event, list) else predicates.equal_to(event)
+            event = (
+                predicates.is_a_member_of(event)
+                if isinstance(event, list)
+                else predicates.equal_to(event)
+            )
 
         if not predicates.is_predicate(args) and args is not None:
             args = predicates.args_predicate(args)
@@ -1097,6 +1124,38 @@ class IntegrationTestAPI(DataHandler):
         self.__assert_pred("Event count", count_pred, len(results), msg)
         return results
 
+    ######################################################################################
+    # File Uplink functions
+    ######################################################################################
+
+    def uplink_file_and_await_completion(self, file_path, destination=None, timeout=10):
+        """
+        This function will upload a file and wait for its completion, awaiting for the
+        FileReceived event.
+
+        Args:
+            file_path: the path to the file to upload
+            destination: the destination path for the uploaded file
+            timeout: the maximum time to wait for the event
+        """
+        self.uplink_file(file_path, destination)
+        self.await_event("FileReceived", timeout=timeout)
+
+    def uplink_file(self, file_path, destination=None):
+        """
+        This function will upload a file to the specified location.
+
+        Note: this will simply put the file on the outgoing queue. No guarantee
+        is made on when the file will be delivered. To wait for the completion of
+        the file uplink, use uplink_file_and_await_completion()
+
+        Args:
+            file_path: the path to the file to upload
+        """
+        uplink_file = Path(self.pipeline.up_store) / Path(file_path).name
+        shutil.copy2(file_path, uplink_file)
+        self.pipeline.files.uplinker.enqueue(str(uplink_file), destination)
+
    ######################################################################################
    # History Searches
    ######################################################################################
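The new uplink helpers added above copy the local file into the pipeline's uplink store, enqueue it with the uplinker, and (for uplink_file_and_await_completion) block until a FileReceived event arrives or the timeout expires. A hedged usage sketch; the fprime_test_api fixture name and the file paths are placeholders.

```python
# Hedged sketch: exercising the new uplink helpers from an integration test.
# Fixture name, local path, and destination are placeholders; the helper
# names and arguments come from the hunk above.
def test_uplink_sample_file(fprime_test_api):
    # Queue the file and wait for the FileReceived event (or time out).
    fprime_test_api.uplink_file_and_await_completion(
        "test_data/sample.bin", destination="/seq/sample.bin", timeout=30
    )

    # Fire-and-forget variant: enqueue only, with no delivery guarantee.
    fprime_test_api.uplink_file("test_data/sample.bin", destination="/seq/sample.bin")
```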
@@ -1207,11 +1266,13 @@ class IntegrationTestAPI(DataHandler):
                             return searcher.get_return_value()
                     time.sleep(0.1)
             except self.TimeoutException:
-                self.__log(f'{name} timed out and ended unsuccessfully.', TestLogger.YELLOW)
+                self.__log(
+                    f"{name} timed out and ended unsuccessfully.", TestLogger.YELLOW
+                )
             finally:
                 signal.alarm(0)
         else:
-            self.__log(f'{name} ended unsuccessfully.', TestLogger.YELLOW)
+            self.__log(f"{name} ended unsuccessfully.", TestLogger.YELLOW)
         return searcher.get_return_value()
 
     def find_history_item(self, search_pred, history, start=None, timeout=0):
@@ -1365,7 +1426,9 @@ class IntegrationTestAPI(DataHandler):
                     self.log(f"Count search counted another item: {item}")
                     self.ret_val.append(item)
                     if self.count_pred(len(self.ret_val)):
-                        msg = f"Count search found a correct amount: {len(self.ret_val)}"
+                        msg = (
+                            f"Count search found a correct amount: {len(self.ret_val)}"
+                        )
                         self.log(msg, TestLogger.YELLOW)
                         return True
                 return False
--- fprime_gds/executables/cli.py
+++ fprime_gds/executables/cli.py
@@ -851,6 +851,11 @@ class LogDeployParser(ParserBase):
                "default": "INFO",
                "help": "Set the logging level of GDS processes [default: %(default)s]",
            },
+            ("--disable-data-logging",): {
+                "action": "store_true",
+                "default": False,
+                "help": "Disable logging of each data item",
+            },
        }
 
    def handle_arguments(self, args, **kwargs):
@@ -1071,6 +1076,7 @@ class StandardPipelineParser(CompositeParser):
            "packet_spec": args_ns.packet_spec,
            "packet_set_name": args_ns.packet_set_name,
            "logging_prefix": args_ns.logs,
+            "data_logging_enabled": not args_ns.disable_data_logging
        }
        pipeline = pipeline if pipeline else StandardPipeline()
        pipeline.transport_implementation = args_ns.connection_transport
--- /dev/null
+++ fprime_gds/executables/dictionary_merge.py
@@ -0,0 +1,206 @@
+""" fprime_gds.executables.dictionary_merge: script to merge two F Prime dictionaries """
+
+import argparse
+import functools
+import json
+import re
+import sys
+from pathlib import Path
+
+
+def validate_metadata(metadata1, metadata2):
+    """ Check consistency between metadata blocks
+
+    The JSON dictionary has multiple fields in the metadata block. This function will check that there is consistency
+    between these two blocks.
+
+    Args:
+        metadata1: metadata from the first dictionary
+        metadata2: metadata from the second dictionary
+    """
+    for field in ["projectVersion", "frameworkVersion", "dictionarySpecVersion"]:
+        value1 = metadata1[field]
+        value2 = metadata2[field]
+        if value1 != value2:
+            raise ValueError(f"Inconsistent metadata values for field '{field}'. ({value1} vs {value2})")
+
+def validate_non_unique(non_unique1, non_unique2):
+    """ Validate non-unique definitions are consistent between dictionaries """
+    indexed_non_unique1 = {value.get("qualifiedName"): value for value in non_unique1}
+
+    for value2 in non_unique2:
+        value1 = indexed_non_unique1.get(value2["qualifiedName"], None)
+        if value1 is not None and value1 != value2:
+            raise ValueError(f"'{value2['qualifiedName']}' has inconsistent definitions")
+
+def validate_unique(unique1, unique2):
+    """ Validate unique definitions have no duplication """
+    ids = {item.get("id", item.get("opcode", "")) for item in unique1}
+    names = {item.get("name") for item in unique1}
+
+
+    for value2 in unique2:
+        name = value2['name']
+        id = value2.get("id", value2.get("opcode", ""))
+        if name in names:
+            raise ValueError(f"'{name}' appears in both dictionaries")
+        if id and id in ids:
+            raise ValueError(f"ID/Opcode {id} used in both dictionaries")
+
+
+def merge_metadata(meta1, meta2, name=None, permissive=False):
+    """ Merge JSON dictionary metadata blocks
+
+    The JSON dictionary starts with a metadata block. This function will merge the two metadata blocks preferring the
+    first when there is a discrepancy. 'name' will be supplied as the new name defaulting to "name1_name2_merged" when
+    not supplied. If 'permissive' is true, version discrepancies will be ignored otherwise this will throw a ValueError
+    if the versions do not match.
+
+    Args:
+        meta1: first metadata block
+        meta2: second metadata block
+        name: (optional) name for the new dictionary (Default: meta.name_meta2.name_merged)
+        permissive: (optional) True to allow version miss-matching. (Default: False)
+    Return:
+        merged metadata block
+    Throws:
+        ValueError on version miss-match without the permissive flag
+    """
+    if not permissive:
+        validate_metadata(meta1, meta2)
+    if name is None:
+        name = f"{meta1.get('deploymentName', 'unknown')}_{meta2.get('deploymentName', 'unknown')}_merged"
+    return {
+        **meta1,
+        **{
+            "deploymentName": name
+        }
+    }
+
+def merge_lists(list1, list2, validator):
+    """ Merge list-like entities
+
+    This will merge two list-like entities using the supplied validator.
+
+    Args:
+        list1: first list-like
+        list2: second list-like
+        validator: validate the lists are consistent or non-colliding
+
+    """
+    validator(list1, list2)
+    singular = {item.get("qualifiedName", item.get("name", "")): item for item in list1 + list2}
+    return list(singular.values())
+
+def merge_non_unique(non_unique1, non_unique2):
+    """ Merge the non-unique blocks in JSON dictionaries
+
+    JSON dictionaries have some non-unique definitions (e.g. "typeDefinitions") that must be merged ensuring
+    consistency but ignoring duplication. This function will create a superset of the two blocks. Inconsistent
+    definitions will result in a ValueError.
+
+    Args:
+        non_unique1: first non unique block
+        non_unique2: second non unique block
+    """
+    return merge_lists(non_unique1, non_unique2, validate_non_unique)
+
+
+def merge_unique(unique1, unique2):
+    """ Merge the unique blocks in JSON dictionaries
+
+    JSON dictionaries have some unique definitions (e.g. "eventDefinitions") that must be merged ensuring that entries
+    are not duplicated between the sets. This function will create a superset of the two blocks. Duplicated definitions
+    will result in a ValueError.
+
+    Args:
+        unique1: first unique block
+        unique2: second unique block
+    """
+    return merge_lists(unique1, unique2, validate_unique)
+
+
+def merge_dictionaries(dictionary1, dictionary2, name=None, permissive=False):
+    """ Merge two dictionaries
+
+    This will merge two JSON dictionaries' major top-level sections. Unknown fields will be preserved preferring
+    dictionary1's content for unknown fields.
+
+    Args:
+        dictionary1: dictionary 1's content
+        dictionary2: dictionary 2's content
+        name: new 'deploymentName' field
+        permissive: allow miss-matched dictionary versions
+
+    Return: merged dictionaries
+
+    """
+    merge_metadata_fn = functools.partial(merge_metadata, name=name, permissive=permissive)
+
+    stages = [
+        ("metadata", merge_metadata_fn),
+        ("typeDefinitions", merge_non_unique),
+        ("constants", merge_non_unique),
+        ("commands", merge_unique),
+        ("parameters", merge_unique),
+        ("events", merge_unique),
+        ("telemetryChannels", merge_unique),
+        ("records", merge_unique),
+        ("containers", merge_unique),
+        ("telemetryPacketSets", merge_unique),
+
+    ]
+
+    merged = {**dictionary2, **dictionary1}
+    for field, merger in stages:
+        object1 = dictionary1[field]
+        object2 = dictionary2[field]
+        try:
+            merged[field] = merger(object1, object2)
+        except ValueError as value_error:
+            raise ValueError(f"Merging '{field}' failed. {value_error}")
+        except KeyError as key_error:
+            raise ValueError(f"Malformed dictionary section '{field}'. Missing key: {key_error}")
+    return merged
+
+def parse_arguments():
+    """ Parse arguments for this script """
+    parser = argparse.ArgumentParser(description="Merge two dictionaries")
+    parser.add_argument("--name", type=str, default=None, help="Name to use as the new 'deploymentName' field")
+    parser.add_argument("--output", type=Path, default=Path("MergedAppDictionary.json"),
+                        help="Output dictionary path. Default: MergedAppDictionary.json")
+    parser.add_argument("--permissive", action="store_true", default=False,
+                        help="Ignore discrepancies between dictionaries")
+    parser.add_argument("dictionary1", type=Path, help="Primary dictionary to merge")
+    parser.add_argument("dictionary2", type=Path, help="Secondary dictionary to merge")
+
+    args = parser.parse_args()
+
+    # Validate arguments
+    if args.name is not None and not re.match("[a-zA-Z_][a-zA-Z_0-9]*"):
+        raise ValueError(f"--name '{args.name}' is an invalid identifier")
+    if not args.dictionary1.exists():
+        raise ValueError(f"'{args.dictionary1}' does not exist")
+    if not args.dictionary2.exists():
+        raise ValueError(f"'{args.dictionary2}' does not exist")
+    return args
+
+def main():
+    """ Main entry point """
+    try:
+        args = parse_arguments()
+        # Open dictionaries
+        with open(args.dictionary1, "r") as dictionary1_fh:
+            dictionary1 = json.load(dictionary1_fh)
+        with open(args.dictionary2, "r") as dictionary2_fh:
+            dictionary2 = json.load(dictionary2_fh)
+        output = merge_dictionaries(dictionary1, dictionary2, args.name, args.permissive)
+        with open(args.output, "w") as output_fh:
+            json.dump(output, output_fh, indent=2)
+    except Exception as exception:
+        print(f"[ERROR] {exception}", file=sys.stderr)
+        sys.exit(1)
+    sys.exit(0)
+
+if __name__ == "__main__":
+    main()
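The new module is also registered as the fprime-merge-dictionary console script (see the entry_points.txt hunk below). For programmatic use, a hedged sketch of merge_dictionaries as defined above; the input and output file names are placeholders.

```python
# Hedged sketch: merging two F Prime JSON dictionaries with the new module.
# Input/output file names are placeholders.
import json

from fprime_gds.executables.dictionary_merge import merge_dictionaries

with open("DeploymentA.json", "r") as fh_a:
    dict_a = json.load(fh_a)
with open("DeploymentB.json", "r") as fh_b:
    dict_b = json.load(fh_b)

# Prefers dict_a for unknown top-level fields; raises ValueError on duplicated
# names/IDs or mismatched versions unless permissive=True.
merged = merge_dictionaries(dict_a, dict_b, name="MergedDeployment", permissive=False)

with open("MergedAppDictionary.json", "w") as out_fh:
    json.dump(merged, out_fh, indent=2)
```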
--- fprime_gds-4.0.0a10.dist-info/METADATA
+++ fprime_gds-4.0.2a1.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: fprime-gds
-Version: 4.0.0a10
+Version: 4.0.2a1
 Summary: F Prime Flight Software Ground Data System layer
 Author-email: Michael Starch <Michael.D.Starch@jpl.nasa.gov>, Thomas Boyer-Chammard <Thomas.Boyer.Chammard@jpl.nasa.gov>
 License:
--- fprime_gds-4.0.0a10.dist-info/RECORD
+++ fprime_gds-4.0.2a1.dist-info/RECORD
@@ -35,7 +35,7 @@ fprime_gds/common/decoders/event_decoder.py,sha256=ib-O18V5Z7bcnUUSDE9R0fU--bAZs
 fprime_gds/common/decoders/file_decoder.py,sha256=Ky2U8bli3YL6GbT9jSSvI73ySOtf0cdZLK4FXTuWjfA,2542
 fprime_gds/common/decoders/pkt_decoder.py,sha256=kW8k3OSbMy96w6MzsGWp656lAQvwxrIznWkD3Sbi8Ig,3329
 fprime_gds/common/distributor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fprime_gds/common/distributor/distributor.py,sha256=ay2b-eMuBEGzQwNoSBLK341CXxGnrFHSIcRKErzIQIU,7999
+fprime_gds/common/distributor/distributor.py,sha256=jged1utucsYkVBm5tSaFEXHDg8suiJ_Hn-9YLPCaVXA,8036
 fprime_gds/common/encoders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fprime_gds/common/encoders/ch_encoder.py,sha256=TBrTJ7TK4WwCh6KAspozh63WcPxrMImloB8tz7qeulw,2878
 fprime_gds/common/encoders/cmd_encoder.py,sha256=5wG5854ozmxctnYou3q9MdQNkTQEmpCiT4oBVgNRZdE,3499
@@ -100,7 +100,7 @@ fprime_gds/common/pipeline/encoding.py,sha256=PttJ8NmXm75mLXyhlmxOJqE8RFt46q1dTh
 fprime_gds/common/pipeline/files.py,sha256=J2zm0sucvImtmSnv0iUp5uTpvUO8nlmz2lUdMuMC5aM,2244
 fprime_gds/common/pipeline/histories.py,sha256=7KyboNnm9OARQk4meVPSSeYpeqH0G8RWRiy0BLBL1rw,3671
 fprime_gds/common/pipeline/router.py,sha256=-P1wI0KXEh_snOzDaq8CjEoWuM_zRm8vUMR1T0oY9qQ,2327
-fprime_gds/common/pipeline/standard.py,sha256=fDSPfyhYPMNhev5IQG2j51sCtQxXZ5PrqmulKH8TNjE,9778
+fprime_gds/common/pipeline/standard.py,sha256=5aivCoPKfZXH_g0pk4fHHwnNt_dEcj-HMWAa4LleCbA,9837
 fprime_gds/common/templates/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fprime_gds/common/templates/ch_template.py,sha256=1MoDZsia0dI_CvnIttwyKLhbQhum35OcJnFc50Xohuo,3893
 fprime_gds/common/templates/cmd_template.py,sha256=n91z4WhFgHwTu6_fQqy7JqpkEObAkllIeEy0AR0DvrQ,5455
@@ -109,7 +109,7 @@ fprime_gds/common/templates/event_template.py,sha256=L0hkWB_kEMhTNodPUqBAev76SMm
 fprime_gds/common/templates/pkt_template.py,sha256=5Wi6389m5j8w7JITBGfeUnw6CYE1-hjcVJ42NJmLDcE,1794
 fprime_gds/common/templates/prm_template.py,sha256=qd0UX4ARZuPWvnFbU_DO3HkQY4QgMfqPxNcNhk-dl9A,2303
 fprime_gds/common/testing_fw/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-fprime_gds/common/testing_fw/api.py,sha256=lnDiyCrnj8Q8uzrmPeFOewSUPX_BvHRisY2jk-4G3eg,66045
+fprime_gds/common/testing_fw/api.py,sha256=ATxIUWVMjoVUllTr0zd42--xgHE1g_OJKq4ec0UFd5k,67848
 fprime_gds/common/testing_fw/predicates.py,sha256=CsHsVs_EVXCLQLd2NVOvy8MxmUQVxLMr3i1ouEUqOtQ,18371
 fprime_gds/common/testing_fw/pytest_integration.py,sha256=CAvuH9_3RuKplKQVB3t_jerPr-LPzwPWoM6z3lMs16g,6203
 fprime_gds/common/tools/README.md,sha256=WVEciyfsbEVGmb9xR5A6Ioy5pBVnCsWOIJfySLeq9YM,2325
@@ -123,9 +123,10 @@ fprime_gds/common/utils/event_severity.py,sha256=7qPXHrDaM_REJ7sKBUEJTZIE0D4qVnV
 fprime_gds/common/utils/string_util.py,sha256=u_2iahRG3ROu3lAAt_KVcK226gEByElXqrA8mH8eDpI,3584
 fprime_gds/executables/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fprime_gds/executables/apps.py,sha256=u79T_PlgMmNmA4YwWjs7LvPMCJnrjnURr05NMthOYP0,13350
-fprime_gds/executables/cli.py,sha256=Qqq3JQOqTsfDRsbTALw25xnwN7fCEVlpcKDV4jvGopQ,50961
+fprime_gds/executables/cli.py,sha256=Cv8gHPqPrDil87Gv0dNFyWpRrp4XBC82-tmIy5kbz_U,51223
 fprime_gds/executables/comm.py,sha256=08rO0o0MJgTRngB7Ygu2IL_gEAWKF7WFvFyro1CqReE,5214
 fprime_gds/executables/data_product_writer.py,sha256=e1Rp2LT_Cpg08f0Ki8GhirC7Wn6LtYiAef7KLAkZHUY,37773
+fprime_gds/executables/dictionary_merge.py,sha256=3Zy8LcbEdBwmNX2GLN_Nr1DfbLyWYHg-y4jdjTVCXgY,7966
 fprime_gds/executables/fprime_cli.py,sha256=CMoT7zWNwM8h2mSZW03AR96wl_XnZXoLNiOZN_sDi38,12431
 fprime_gds/executables/run_deployment.py,sha256=Zl0Y9-6i6c8tZhcS7XkAeVQtzn0d9fV-3UJQZ0bnBrc,7237
 fprime_gds/executables/tcpserver.py,sha256=KspVpu5YIuiWKOk5E6UDMKvqXYrRB1j9aX8CkMxysfw,17555
@@ -244,10 +245,10 @@ fprime_gds/flask/static/third-party/webfonts/fa-solid-900.woff2,sha256=mDS4KtJuK
 fprime_gds/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 fprime_gds/plugin/definitions.py,sha256=QlxW1gNvoiqGMslSJjh3dTFZuv0igFHawN__3XJ0Wns,5355
 fprime_gds/plugin/system.py,sha256=M9xb-8jBhCUUx3X1z2uAP8Wx_v6NkL8JeaFgGcMnQqY,13432
-fprime_gds-4.0.0a10.dist-info/licenses/LICENSE.txt,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
-fprime_gds-4.0.0a10.dist-info/licenses/NOTICE.txt,sha256=vXjA_xRcQhd83Vfk5D_vXg5kOjnnXvLuMi5vFKDEVmg,1612
-fprime_gds-4.0.0a10.dist-info/METADATA,sha256=zD-VViHWpGeATD0SzBD1-yKMFOR25JIEIM3uBqMlY5Q,24577
-fprime_gds-4.0.0a10.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-fprime_gds-4.0.0a10.dist-info/entry_points.txt,sha256=V2XMHMUJUGTVx5s3_kK1jLmoxSKE1vvj2XWHH9y49WQ,423
-fprime_gds-4.0.0a10.dist-info/top_level.txt,sha256=6vzFLIX6ANfavKaXFHDMSLFtS94a6FaAsIWhjgYuSNE,27
-fprime_gds-4.0.0a10.dist-info/RECORD,,
+fprime_gds-4.0.2a1.dist-info/licenses/LICENSE.txt,sha256=z8d0m5b2O9McPEK1xHG_dWgUBT6EfBDz6wA0F7xSPTA,11358
+fprime_gds-4.0.2a1.dist-info/licenses/NOTICE.txt,sha256=vXjA_xRcQhd83Vfk5D_vXg5kOjnnXvLuMi5vFKDEVmg,1612
+fprime_gds-4.0.2a1.dist-info/METADATA,sha256=7qyu-NnTZ5K22Lo9dJ4Db_4FqR05UWoWZnNDesd__Ks,24576
+fprime_gds-4.0.2a1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+fprime_gds-4.0.2a1.dist-info/entry_points.txt,sha256=16r0xeF-Qn-ducW_QvHiyrFnNjnLK-OOaTbqQIQpd0o,494
+fprime_gds-4.0.2a1.dist-info/top_level.txt,sha256=6vzFLIX6ANfavKaXFHDMSLFtS94a6FaAsIWhjgYuSNE,27
+fprime_gds-4.0.2a1.dist-info/RECORD,,
--- fprime_gds-4.0.0a10.dist-info/entry_points.txt
+++ fprime_gds-4.0.2a1.dist-info/entry_points.txt
@@ -3,6 +3,7 @@ fprime-cli = fprime_gds.executables.fprime_cli:main
 fprime-dp-write = fprime_gds.executables.data_product_writer:main
 fprime-fpyc = fprime_gds.common.fpy.main:main
 fprime-gds = fprime_gds.executables.run_deployment:main
+fprime-merge-dictionary = fprime_gds.executables.dictionary_merge:main
 fprime-prm-write = fprime_gds.common.tools.params:main
 fprime-seqgen = fprime_gds.common.tools.seqgen:main
 