fprime-gds 3.6.1__py3-none-any.whl → 4.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fprime_gds/common/communication/adapters/ip.py +14 -9
- fprime_gds/common/communication/adapters/uart.py +34 -25
- fprime_gds/common/communication/ccsds/__init__.py +0 -0
- fprime_gds/common/communication/ccsds/apid.py +19 -0
- fprime_gds/common/communication/ccsds/chain.py +106 -0
- fprime_gds/common/communication/ccsds/space_data_link.py +196 -0
- fprime_gds/common/communication/ccsds/space_packet.py +129 -0
- fprime_gds/common/communication/framing.py +27 -32
- fprime_gds/common/decoders/ch_decoder.py +1 -1
- fprime_gds/common/decoders/event_decoder.py +9 -2
- fprime_gds/common/decoders/pkt_decoder.py +1 -1
- fprime_gds/common/distributor/distributor.py +6 -3
- fprime_gds/common/encoders/ch_encoder.py +2 -2
- fprime_gds/common/encoders/cmd_encoder.py +2 -2
- fprime_gds/common/encoders/event_encoder.py +2 -2
- fprime_gds/common/encoders/pkt_encoder.py +2 -2
- fprime_gds/common/encoders/seq_writer.py +2 -2
- fprime_gds/common/fpy/README.md +56 -0
- fprime_gds/common/fpy/SPEC.md +69 -0
- fprime_gds/common/fpy/__init__.py +0 -0
- fprime_gds/common/fpy/bytecode/__init__.py +0 -0
- fprime_gds/common/fpy/bytecode/directives.py +490 -0
- fprime_gds/common/fpy/codegen.py +1687 -0
- fprime_gds/common/fpy/grammar.lark +88 -0
- fprime_gds/common/fpy/main.py +40 -0
- fprime_gds/common/fpy/parser.py +239 -0
- fprime_gds/common/gds_cli/base_commands.py +1 -1
- fprime_gds/common/handlers.py +39 -0
- fprime_gds/common/loaders/fw_type_json_loader.py +54 -0
- fprime_gds/common/loaders/json_loader.py +15 -0
- fprime_gds/common/loaders/pkt_json_loader.py +125 -0
- fprime_gds/common/loaders/prm_json_loader.py +85 -0
- fprime_gds/common/logger/__init__.py +2 -2
- fprime_gds/common/pipeline/dictionaries.py +60 -41
- fprime_gds/common/pipeline/encoding.py +19 -0
- fprime_gds/common/pipeline/histories.py +4 -0
- fprime_gds/common/pipeline/standard.py +16 -2
- fprime_gds/common/templates/cmd_template.py +8 -0
- fprime_gds/common/templates/prm_template.py +81 -0
- fprime_gds/common/testing_fw/api.py +148 -1
- fprime_gds/common/testing_fw/pytest_integration.py +37 -3
- fprime_gds/common/tools/README.md +34 -0
- fprime_gds/common/tools/params.py +246 -0
- fprime_gds/common/utils/config_manager.py +6 -6
- fprime_gds/common/utils/data_desc_type.py +6 -1
- fprime_gds/executables/apps.py +189 -11
- fprime_gds/executables/cli.py +468 -127
- fprime_gds/executables/comm.py +5 -2
- fprime_gds/executables/data_product_writer.py +164 -165
- fprime_gds/executables/fprime_cli.py +3 -3
- fprime_gds/executables/run_deployment.py +13 -5
- fprime_gds/flask/app.py +3 -0
- fprime_gds/flask/resource.py +5 -2
- fprime_gds/flask/static/addons/chart-display/addon.js +8 -3
- fprime_gds/flask/static/js/datastore.js +1 -0
- fprime_gds/flask/static/js/vue-support/channel.js +1 -1
- fprime_gds/flask/static/js/vue-support/event.js +1 -1
- fprime_gds/plugin/definitions.py +86 -8
- fprime_gds/plugin/system.py +172 -58
- {fprime_gds-3.6.1.dist-info → fprime_gds-4.0.0.dist-info}/METADATA +23 -21
- {fprime_gds-3.6.1.dist-info → fprime_gds-4.0.0.dist-info}/RECORD +66 -50
- {fprime_gds-3.6.1.dist-info → fprime_gds-4.0.0.dist-info}/WHEEL +1 -1
- {fprime_gds-3.6.1.dist-info → fprime_gds-4.0.0.dist-info}/entry_points.txt +2 -0
- fprime_gds/common/loaders/ch_py_loader.py +0 -79
- fprime_gds/common/loaders/cmd_py_loader.py +0 -66
- fprime_gds/common/loaders/event_py_loader.py +0 -75
- fprime_gds/common/loaders/python_loader.py +0 -132
- {fprime_gds-3.6.1.dist-info → fprime_gds-4.0.0.dist-info/licenses}/LICENSE.txt +0 -0
- {fprime_gds-3.6.1.dist-info → fprime_gds-4.0.0.dist-info/licenses}/NOTICE.txt +0 -0
- {fprime_gds-3.6.1.dist-info → fprime_gds-4.0.0.dist-info}/top_level.txt +0 -0
fprime_gds/executables/comm.py
CHANGED
@@ -29,6 +29,7 @@ import fprime_gds.common.logger
 import fprime_gds.executables.cli
 from fprime_gds.common.communication.updown import Downlinker, Uplinker
 from fprime_gds.common.zmq_transport import ZmqGround
+from fprime_gds.plugin.system import Plugins

 # Uses non-standard PIP package pyserial, so test the waters before getting a hard-import crash
 try:
@@ -46,6 +47,8 @@ def main():

     :return: return code
     """
+    # comm.py supports 2 and only 2 plugin categories
+    Plugins.system(["communication", "framing"])
     args, _ = fprime_gds.executables.cli.ParserBase.parse_args(
         [
             fprime_gds.executables.cli.LogDeployParser,
@@ -71,11 +74,11 @@ def main():
         args.tts_addr, args.tts_port
     )

-    adapter =
+    adapter = Plugins.system().get_selected_class("communication")()

     # Set the framing class used and pass it to the uplink and downlink component constructions giving each a separate
     # instantiation
-    framer_instance =
+    framer_instance = Plugins.system().get_selected_class("framing")()
     LOGGER.info(
         "Starting uplinker/downlinker connecting to FSW using %s with %s",
         args.communication_selection,
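The two lookups above replace hard-wired adapter and framer construction: comm.py now declares the only plugin categories it supports and asks the plugin system for the single selected class in each category. The sketch below mimics that select-one-class-per-category pattern with a toy registry; the registry and class names are illustrative and are not the fprime_gds plugin API.

```python
# Illustrative sketch (not the fprime_gds implementation): a registry that mimics the
# "select one plugin class per category" pattern used by comm.py above.
from typing import Dict, Type


class CommAdapter:
    """Hypothetical base class standing in for a communication adapter plugin."""
    def open(self) -> None:
        print("opening adapter")


class IpAdapter(CommAdapter):
    pass


class PluginRegistry:
    """Toy stand-in for Plugins.system(): tracks one selected class per category."""
    def __init__(self, categories):
        self.categories = list(categories)
        self._selected: Dict[str, Type] = {}

    def select(self, category: str, cls: Type) -> None:
        assert category in self.categories, f"unknown category: {category}"
        self._selected[category] = cls

    def get_selected_class(self, category: str) -> Type:
        return self._selected[category]


registry = PluginRegistry(["communication", "framing"])
registry.select("communication", IpAdapter)
adapter = registry.get_selected_class("communication")()  # mirrors the comm.py hunk
adapter.open()
```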
fprime_gds/executables/data_product_writer.py
CHANGED
@@ -72,7 +72,7 @@ import json
 import os
 import sys
 from typing import List, Dict, Union, ForwardRef
-from pydantic import BaseModel, field_validator
+from pydantic import BaseModel, field_validator, computed_field, model_validator
 from typing import List, Union
 import argparse
 from binascii import crc32
@@ -88,128 +88,7 @@ class bcolors:
     UNDERLINE = '\033[4m'


-
-# then this can be removed.
-header_data = {
-    "typeDefinitions" : [
-        {
-            "kind": "array",
-            "qualifiedName": "UserDataArray",
-            "size": 32,
-            "elementType": {
-                "name": "U8",
-                "kind": "integer",
-                "signed": False,
-                "size": 8
-            }
-        },
-
-        {
-            "kind": "struct",
-            "qualifiedName": "timeStruct",
-            "members": {
-                "seconds": {
-                    "type": {
-                        "name": "U32",
-                        "kind": "integer",
-                        "signed": False,
-                        "size": 32
-                    }
-                },
-                "useconds": {
-                    "type": {
-                        "name": "U32",
-                        "kind": "integer",
-                        "signed": False,
-                        "size": 32
-                    }
-                },
-                "timeBase": {
-                    "type": {
-                        "name": "U16",
-                        "kind": "integer",
-                        "signed": False,
-                        "size": 16
-                    }
-                },
-                "context": {
-                    "type": {
-                        "name": "U8",
-                        "kind": "integer",
-                        "signed": False,
-                        "size": 8
-                    }
-                }
-            }
-        }
-    ],
-
-    "enums" : [
-    ],
-
-    "header": {
-        "PacketDescriptor": {
-            "type": {
-                "name": "U32",
-                "kind": "integer",
-                "signed": False,
-                "size": 32
-            }
-        },
-        "Id": {
-            "type": {
-                "name": "U32",
-                "kind": "integer",
-                "signed": False,
-                "size": 32
-            }
-        },
-        "Priority": {
-            "type": {
-                "name": "U32",
-                "kind": "integer",
-                "signed": False,
-                "size": 32
-            }
-        },
-        "TimeTag": {
-            "type": {
-                "name": "timeStruct",
-                "kind": "qualifiedIdentifier"
-            }
-        },
-        "ProcTypes": {
-            "type": {
-                "name": "U8",
-                "kind": "integer",
-                "signed": False,
-                "size": 8
-            }
-        },
-        "UserData": {
-            "type": {
-                "name": "UserDataArray",
-                "kind": "qualifiedIdentifier"
-            }
-        },
-        "DpState": {
-            "type": {
-                "name": "U8",
-                "kind": "integer",
-                "signed": False,
-                "size": 8
-            }
-        },
-        "DataSize": {
-            "type": {
-                "name": "U16",
-                "kind": "integer",
-                "signed": False,
-                "size": 16
-            }
-        }
-    },
-
+header_hash_data = {
     "headerHash": {
         "type": {
             "name": "U32",
@@ -218,25 +97,6 @@ header_data = {
             "size": 32
         }
     },
-
-    "dataId": {
-        "type": {
-            "name": "U32",
-            "kind": "integer",
-            "signed": False,
-            "size": 32
-        }
-    },
-
-    "dataSize": {
-        "type": {
-            "name": "U16",
-            "kind": "integer",
-            "signed": False,
-            "size": 16
-        }
-    },
-
     "dataHash": {
         "type": {
             "name": "U32",
@@ -305,9 +165,21 @@ class BoolType(BaseModel):
         if v != "bool":
             raise ValueError('Check the "kind" field')
         return v
+
+class StringType(BaseModel):
+    name: str
+    kind: str
+    size: int
+
+    @field_validator('kind')
+    def kind_qualifiedIdentifier(cls, v):
+        if v != "string":
+            raise ValueError('Check the "kind" field')
+        return v

 Type = ForwardRef('Type')
 ArrayType = ForwardRef('ArrayType')
+AliasType = ForwardRef('AliasType')

 class QualifiedType(BaseModel):
     kind: str
@@ -320,8 +192,9 @@ class QualifiedType(BaseModel):
         return v

 class StructMember(BaseModel):
-    type: Union[IntegerType, FloatType, BoolType, QualifiedType]
+    type: Union[IntegerType, FloatType, BoolType, StringType, QualifiedType]
     size: int = 1
+    index: int

 class StructType(BaseModel):
     kind: str
@@ -333,12 +206,24 @@ class StructType(BaseModel):
         if v != "struct":
             raise ValueError('Check the "kind" field')
         return v
+
+class AliasType(BaseModel):
+    kind: str
+    qualifiedName: str
+    type: Union[AliasType, StructType, ArrayType, IntegerType, BoolType, FloatType, StringType, QualifiedType]
+    underlyingType: Union[StructType, ArrayType, IntegerType, BoolType, FloatType, StringType, QualifiedType]
+
+    @field_validator('kind')
+    def kind_qualifiedIdentifier(cls, v):
+        if v != "alias":
+            raise ValueError('Check the "kind" field')
+        return v

 class ArrayType(BaseModel):
     kind: str
     qualifiedName: str
     size: int
-    elementType: Union[StructType, ArrayType, IntegerType, FloatType, QualifiedType]
+    elementType: Union[AliasType, StructType, ArrayType, IntegerType, BoolType, FloatType, StringType, QualifiedType]

     @field_validator('kind')
     def kind_qualifiedIdentifier(cls, v):
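AliasType is declared as a ForwardRef earlier in the file and can nest inside itself (an alias whose target is another alias), so the model set only becomes fully usable once the model_rebuild() calls later in the file resolve the forward references. A minimal, self-contained sketch of that pydantic v2 pattern, using made-up model names rather than the real dictionary types:

```python
# Minimal sketch of the recursive-model pattern used above: a pydantic v2 model that
# refers to itself through a forward reference; model_rebuild() resolves any
# outstanding forward references (a no-op if pydantic already resolved them).
from typing import Union
from pydantic import BaseModel, field_validator


class Leaf(BaseModel):
    name: str
    size: int


class Alias(BaseModel):
    kind: str
    qualifiedName: str
    # "Alias" is a forward reference: the class is not fully defined at this point
    type: Union["Alias", Leaf]

    @field_validator("kind")
    def kind_must_be_alias(cls, v):
        if v != "alias":
            raise ValueError('Check the "kind" field')
        return v


Alias.model_rebuild()

nested = Alias(
    kind="alias",
    qualifiedName="Outer",
    type=Alias(kind="alias", qualifiedName="Inner", type=Leaf(name="U8", size=8)),
)
print(nested.type.qualifiedName)  # Inner
```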
@@ -346,6 +231,12 @@ class ArrayType(BaseModel):
             raise ValueError('Check the "kind" field')
         return v

+class Constant(BaseModel):
+    qualifiedName: str
+    type: Union[IntegerType, FloatType, BoolType, StringType]
+    value: Union[int, float, bool, str]
+
+
 class EnumeratedConstant(BaseModel):
     name: str
     value: int
@@ -366,11 +257,11 @@ class EnumType(BaseModel):


 class Type(BaseModel):
-    type: Union[StructType, ArrayType, IntegerType, FloatType, BoolType, QualifiedType]
+    type: Union[AliasType, StructType, ArrayType, IntegerType, FloatType, BoolType, QualifiedType, StringType]

 class RecordStruct(BaseModel):
     name: str
-    type: Union[StructType, ArrayType, IntegerType, FloatType, BoolType, QualifiedType]
+    type: Union[AliasType, StructType, ArrayType, IntegerType, FloatType, BoolType, QualifiedType, StringType]
     array: bool
     id: int
     annotation: str
@@ -388,7 +279,8 @@ class ContainerStruct(BaseModel):

 class FprimeDict(BaseModel):
     metadata: Dict[str, Union[str, List[str]]]
-    typeDefinitions: List[Union[ArrayType, StructType, EnumType]]
+    typeDefinitions: List[Union[AliasType, ArrayType, StructType, EnumType]]
+    constants: List[Constant]
     records: List[RecordStruct]
     containers: List[ContainerStruct]

@@ -398,19 +290,82 @@ class FprimeDict(BaseModel):
 # -------------------------------------------------------------------------------------

 class DPHeader(BaseModel):
-    typeDefinitions: List[Union[ArrayType, StructType, EnumType]]
-
+    typeDefinitions: List[Union[AliasType, ArrayType, StructType, EnumType]]
+    constants: List[Constant]
     headerHash: Type
-    dataId: Type
-    dataSize: Type
     dataHash: Type

+    @computed_field
+    @property
+    def header(self) -> Dict[str, Union[Type, ArrayType, EnumType]]:
+        # Mapping from type/constant names to header field names
+        header_field_names = {
+            "FwPacketDescriptorType": "PacketDescriptor",
+            "FwDpIdType": "Id",
+            "FwDpPriorityType" : "Priority",
+            "Seconds": "Seconds",
+            "USeconds": "USeconds",
+            "FwTimeBaseStoreType": "TimeBase",
+            "FwTimeContextStoreType": "Context",
+            "Fw.DpCfg.ProcType": "ProcTypes",
+            "Fw.DpCfg.CONTAINER_USER_DATA_SIZE": "UserData",
+            "Fw.DpState": "DpState",
+            "FwSizeStoreType": "DataSize"
+        }
+        # All types/constants that make up the DP header
+        # Key: Values of above header_field_names
+        # Values: Initialized to None, populated below based on type names/constants in the JSON dictionary
+        header_dict = {v: None for v in header_field_names.values()}
+        for k, v in header_field_names.items():
+            # Seconds and USeconds are not in the dictionary, but are both always U32
+            if k == "Seconds" or k == "USeconds":
+                header_dict[v] = Type(type=IntegerType(name="U32", kind="integer", size=32, signed=False))
+            # According to Fw.Dp SDD, Header::UserData is an array of U8 of size Fw::DpCfg::CONTAINER_USER_DATA_SIZE.
+            elif k == "Fw.DpCfg.CONTAINER_USER_DATA_SIZE":
+                for t in self.constants:
+                    if t.qualifiedName == k:
+                        header_dict[v] = ArrayType(
+                            kind="array",
+                            qualifiedName="UserData",
+                            size=t.value,
+                            elementType=IntegerType(name="U8", kind="integer", size=8, signed=False)
+                        )
+                        break
+            else:
+                for t in self.typeDefinitions:
+                    if t.qualifiedName == k:
+                        header_dict[v] = t
+                        break
+
+        return header_dict
+
+    @computed_field
+    @property
+    def dataId(self) -> AliasType:
+        return self.header.get("Id")
+
+    @computed_field
+    @property
+    def dataSize(self) -> AliasType:
+        return self.header.get("DataSize")
+
+    def get_size_store_bytes(self) -> int:
+        return self.header.get("DataSize").underlyingType.size // 8
+
+    @model_validator(mode='after')
+    def validate_header(self) -> 'DPHeader':
+        for k, v in self.header.items():
+            if not v:
+                raise ValueError(f'Dictionary is missing type definition or constant {k}.')
+        return self
+
 ArrayType.model_rebuild()
+AliasType.model_rebuild()
 StructType.model_rebuild()
 Type.model_rebuild()

-TypeKind = Union[
-TypeDef = Union[ArrayType, StructType]
+TypeKind = Union[AliasType, ArrayType, StructType, IntegerType, FloatType, EnumType, BoolType, QualifiedType, StringType]
+TypeDef = Union[AliasType, ArrayType, StructType]

 # Map the JSON types to struct format strings
 type_mapping = {
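The rewritten DPHeader leans on two pydantic v2 features: @computed_field properties that derive the header layout from the dictionary's type definitions and constants, and an after-mode @model_validator that fails fast when a required entry is missing. A minimal sketch of that combination, with an invented model rather than the real DPHeader:

```python
# Sketch of the pydantic v2 pattern used by DPHeader above: a @computed_field property
# derived from stored fields, plus an "after" model validator that rejects input where
# the derived mapping is incomplete. Model and field names here are illustrative.
from typing import Dict, List, Optional
from pydantic import BaseModel, computed_field, model_validator


class Header(BaseModel):
    required_fields: List[str]
    definitions: Dict[str, int]

    @computed_field
    @property
    def resolved(self) -> Dict[str, Optional[int]]:
        # Look up each required field in the provided definitions (None if missing)
        return {name: self.definitions.get(name) for name in self.required_fields}

    @model_validator(mode="after")
    def check_all_resolved(self) -> "Header":
        for name, value in self.resolved.items():
            if value is None:
                raise ValueError(f"Missing definition for {name}")
        return self


h = Header(required_fields=["Id", "DataSize"], definitions={"Id": 32, "DataSize": 16})
print(h.resolved)  # {'Id': 32, 'DataSize': 16}
```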
@@ -424,7 +379,8 @@ type_mapping = {
     'U64': 'Q', # Unsigned 64-bit integer
     'F32': 'f', # 32-bit float
     'F64': 'd', # 64-bit float
-    'bool': '?' # An 8 bit boolean
+    'bool': '?', # An 8 bit boolean
+    'string': 's'
     # Add more mappings as needed
 }

@@ -538,6 +494,7 @@ class DataProductWriter:
         self.binaryFileName = binaryFileName
         self.totalBytesRead = 0
         self.calculatedCRC = 0
+        self.headerJSON = None


     # ----------------------------------------------------------------------------------------------
@@ -573,8 +530,29 @@ class DataProductWriter:
         except KeyError:
             raise KeyError(f"Unrecognized JSON Dictionary Type: {intType}")
         data = struct.unpack(format_str, bytes_read)[0]
+        return data
+
+    def read_and_deserialize_string(self) -> str:
+        size_store_type_bytes = self.headerJSON.get_size_store_bytes()
+        bytes_read_store = self.binaryFile.read(size_store_type_bytes)
+        if len(bytes_read_store) != size_store_type_bytes:
+            raise IOError(f"Tried to read {size_store_type_bytes} bytes from the binary file, but failed.")
+
+        self.totalBytesRead += size_store_type_bytes
+
+        format_str = f'{BIG_ENDIAN}H'
+        string_size_data = struct.unpack(format_str, bytes_read_store)[0]
+
+        bytes_read = self.binaryFile.read(string_size_data)
+        if len(bytes_read) != string_size_data:
+            raise IOError(f"Tried to read {string_size_data} bytes from the binary file, but failed.")

+        self.calculatedCRC = crc32(bytes_read_store + bytes_read, self.calculatedCRC) & 0xffffffff
+        self.totalBytesRead += string_size_data

+        format_str = f'{BIG_ENDIAN}{string_size_data}s'
+        data = struct.unpack(format_str, bytes_read)[0]
+        data = data.decode()
         return data

     # -----------------------------------------------------------------------------------------------------------------------
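The new read_and_deserialize_string() reads a size store (whose width comes from FwSizeStoreType via the dictionary), then that many bytes of string data, folding both into the running CRC32. Below is a standalone sketch of the same length-prefixed read against an in-memory buffer, assuming a 2-byte big-endian size store; the buffer contents are made up.

```python
# Standalone sketch of the length-prefixed string read added above: a big-endian U16
# size store followed by that many bytes, with a running CRC32 over everything read.
import io
import struct
from binascii import crc32

BIG_ENDIAN = ">"

payload = struct.pack(f"{BIG_ENDIAN}H", 5) + b"hello"   # size store + string bytes
stream = io.BytesIO(payload)
running_crc = 0

size_store = stream.read(2)                              # size store assumed to be U16 here
(length,) = struct.unpack(f"{BIG_ENDIAN}H", size_store)
raw = stream.read(length)
if len(raw) != length:
    raise IOError(f"Tried to read {length} bytes, but failed.")

running_crc = crc32(size_store + raw, running_crc) & 0xFFFFFFFF
(text,) = struct.unpack(f"{BIG_ENDIAN}{length}s", raw)
print(text.decode(), hex(running_crc))
```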
@@ -621,7 +599,7 @@ class DataProductWriter:
     # AssertionError: If the field_config is not an IntegerType, FloatType, or BoolType.
     # -----------------------------------------------------------------------------------------------------------------------

-    def read_field(self, field_config: Union[IntegerType, FloatType, BoolType]) -> Union[int, float, bool]:
+    def read_field(self, field_config: Union[IntegerType, FloatType, BoolType, StringType]) -> Union[int, float, bool]:

         if type(field_config) is IntegerType:
             sizeBytes = field_config.size // 8
@@ -632,6 +610,9 @@ class DataProductWriter:
         elif type(field_config) is BoolType:
             sizeBytes = field_config.size // 8

+        elif type(field_config) is StringType:
+            return self.read_and_deserialize_string()
+
         else:
             assert False, "Unsupported typeKind encountered"

@@ -646,6 +627,7 @@ class DataProductWriter:
     # The process varies depending on the field's type:
     # - For basic types (IntegerType, FloatType, BoolType), it directly reads and assigns the value.
     # - For EnumType, it reads the value, finds the corresponding enum identifier, and assigns it.
+    # - For AliasType, it reads and assigns the value based on the alias' underlying type.
     # - For ArrayType, it creates a list, iteratively fills it with elements read recursively, and assigns the list.
     # - For StructType, it constructs a nested dictionary by recursively processing each struct member.
     # - For QualifiedType, it resolves the actual type from typeList and recursively processes the field.
@@ -678,6 +660,8 @@ class DataProductWriter:
         elif isinstance(typeKind, BoolType):
             parent_dict[field_name] = self.read_field(typeKind)

+        elif isinstance(typeKind, StringType):
+            parent_dict[field_name] = self.read_field(typeKind)

         elif isinstance(typeKind, EnumType):
             value = self.read_field(typeKind.representationType)
@@ -685,6 +669,8 @@ class DataProductWriter:
             reverse_mapping = {enum.value: enum.name for enum in enum_mapping}
             parent_dict[field_name] = reverse_mapping[value]

+        elif isinstance(typeKind, AliasType):
+            self.get_struct_item(field_name, typeKind.underlyingType, typeList, parent_dict)

         elif isinstance(typeKind, ArrayType):
             array_list = []
@@ -696,7 +682,8 @@ class DataProductWriter:

         elif isinstance(typeKind, StructType):
             array_list = []
-
+            sorted_members = dict(sorted(typeKind.members.items(), key=lambda member: member[1].index))
+            for key, member in sorted_members.items():
                 for i in range(member.size):
                     element_dict = {}
                     self.get_struct_item(key, member.type, typeList, element_dict)
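Struct members now carry an explicit index in the JSON dictionary, and the writer visits them in that order instead of relying on dict insertion order. A tiny illustration of the same sort, with made-up member names and indices:

```python
# Sketch of the member-ordering change above: sort struct members by their declared
# serialization index before reading them from the binary stream.
members = {
    "useconds": {"index": 1},
    "seconds": {"index": 0},
    "timeBase": {"index": 2},
}

sorted_members = dict(sorted(members.items(), key=lambda member: member[1]["index"]))
print(list(sorted_members))  # ['seconds', 'useconds', 'timeBase']
```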
@@ -738,7 +725,13 @@ class DataProductWriter:
         rootDict = {}

         for field_name, field_info in header_fields.items():
-
+            # Header is composed of enums, arrays, and aliases
+            if isinstance(field_info, EnumType):
+                self.get_struct_item(field_name, field_info.representationType, headerJSON.typeDefinitions, rootDict)
+            elif isinstance(field_info, ArrayType):
+                self.get_struct_item(field_name, field_info, headerJSON.typeDefinitions, rootDict)
+            else:
+                self.get_struct_item(field_name, field_info.type, headerJSON.typeDefinitions, rootDict)

         computedHash = self.calculatedCRC
         rootDict['headerHash'] = self.read_field(headerJSON.headerHash.type)
@@ -775,7 +768,7 @@ class DataProductWriter:
     def get_record_data(self, headerJSON: DPHeader, dictJSON: FprimeDict) -> Dict[str, int]:
         rootDict = {}
         # Go through all the Records and find the one that matches recordId
-        rootDict['dataId'] = self.read_field(headerJSON.dataId.
+        rootDict['dataId'] = self.read_field(headerJSON.dataId.underlyingType)
         for record in dictJSON.records:
             if record.id == rootDict['dataId']:
                 print(f'Processing Record ID {record.id}')
@@ -866,18 +859,25 @@ class DataProductWriter:
         print(f"Parsing {self.jsonDict}...")
         try:
             with open(self.jsonDict, 'r') as fprimeDictFile:
-
+                dict_json = json.load(fprimeDictFile)
+                dictJSON = FprimeDict(**dict_json)
+
+                header_json = header_hash_data
+                if "typeDefinitions" in dict_json:
+                    header_json["typeDefinitions"] = dict_json["typeDefinitions"]
+                if "constants" in dict_json:
+                    header_json["constants"] = dict_json["constants"]
+                self.headerJSON = DPHeader(**header_json)
+
         except json.JSONDecodeError as e:
             raise DictionaryError(self.jsonDict, e.lineno)

         self.check_record_data(dictJSON)

-        headerJSON = DPHeader(**header_data)
-
         with open(self.binaryFileName, 'rb') as self.binaryFile:

             # Read the header data up until the Records
-            headerData = self.get_header_info(headerJSON)
+            headerData = self.get_header_info(self.headerJSON)

             # Read the total data size
             dataSize = headerData['DataSize']
@@ -888,13 +888,12 @@ class DataProductWriter:
             recordList = [headerData]

             while self.totalBytesRead < dataSize:
-
-                recordData = self.get_record_data(headerJSON, dictJSON)
+                recordData = self.get_record_data(self.headerJSON, dictJSON)
                 recordList.append(recordData)

             computedCRC = self.calculatedCRC
             # Read the data checksum
-            headerData['dataHash'] = self.read_field(headerJSON.dataHash.type)
+            headerData['dataHash'] = self.read_field(self.headerJSON.dataHash.type)

             if computedCRC != headerData['dataHash']:
                 raise CRCError("Data", headerData['dataHash'], computedCRC)
fprime_gds/executables/fprime_cli.py
CHANGED
@@ -14,7 +14,7 @@ from copy import deepcopy
 from typing import Callable, List, Union

 import argcomplete
-import
+import importlib.metadata

 # NOTE: These modules are now only lazily loaded below as needed, due to slow
 # performance when importing them
@@ -230,7 +230,7 @@ class CommandSubparserInjector(CliSubparserInjectorBase):
         from fprime_gds.common.pipeline.dictionaries import Dictionaries

         dictionary = Dictionaries()
-        dictionary.load_dictionaries(dict_path, None)
+        dictionary.load_dictionaries(dict_path, None, None)
         command_names = dictionary.command_name.keys()
         return [name for name in command_names if name.startswith(prefix)]

@@ -322,7 +322,7 @@ def create_parser():
     parser = argparse.ArgumentParser(
         description="provides utilities for interacting with the F' Ground Data System (GDS)"
     )
-    fprime_gds_version =
+    fprime_gds_version = importlib.metadata.version("fprime-gds")
     parser.add_argument("-V", "--version", action="version", version=fprime_gds_version)

     # Add subcommands to the parser
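The CLI now reads its own version from installed package metadata rather than a hard-coded string. A small sketch of that lookup, including the not-installed case:

```python
# Sketch of the version lookup used above: query the installed distribution's
# metadata instead of maintaining a version constant in the source.
import importlib.metadata

try:
    version = importlib.metadata.version("fprime-gds")
except importlib.metadata.PackageNotFoundError:
    version = "unknown (package not installed)"
print(version)
```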
fprime_gds/executables/run_deployment.py
CHANGED
@@ -9,6 +9,7 @@ import webbrowser

 from fprime_gds.executables.cli import (
     BinaryDeployment,
+    ConfigDrivenParser,
     CommParser,
     GdsParser,
     ParserBase,
@@ -16,6 +17,7 @@ from fprime_gds.executables.cli import (
     PluginArgumentParser,
 )
 from fprime_gds.executables.utils import AppWrapperException, run_wrapped_application
+from fprime_gds.plugin.system import Plugins

 BASE_MODULE_ARGUMENTS = [sys.executable, "-u", "-m"]

@@ -36,7 +38,7 @@ def parse_args():
         PluginArgumentParser,
     ]
     # Parse the arguments, and refine through all handlers
-    args, parser =
+    args, parser = ConfigDrivenParser.parse_args(arg_handlers, "Run F prime deployment and GDS")
     return args


@@ -174,8 +176,8 @@ def launch_comm(parsed_args):


 def launch_plugin(plugin_class_instance):
-    """
-    plugin_name = getattr(plugin_class_instance, "get_name", lambda:
+    """Launch a plugin instance"""
+    plugin_name = getattr(plugin_class_instance, "get_name", lambda: plugin_class_instance.__class__.__name__)()
     return launch_process(
         plugin_class_instance.get_process_invocation(),
         name=f"{ plugin_name } Plugin App",
@@ -215,8 +217,14 @@ def main():
     # Launch launchers and wait for the last app to finish
     try:
         procs = [launcher(parsed_args) for launcher in launchers]
-        _ = [
-
+        _ = [
+            launch_plugin(cls())
+            for cls in Plugins.system().get_feature_classes("gds_app")
+        ]
+        _ = [
+            instance().run()
+            for instance in Plugins.system().get_feature_classes("gds_function")
+        ]

         print("[INFO] F prime is now running. CTRL-C to shutdown all components.")
         procs[-1].wait()
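run_deployment now launches two flavors of feature plugins: "gds_app" plugins supply a process invocation and are started as child processes via launch_plugin(), while "gds_function" plugins are instantiated and run() inline. The mock below imitates that split; the plugin classes and registry lists are invented for illustration and are not the fprime_gds plugin system.

```python
# Illustrative mock of the two plugin flavors launched above: app plugins run as
# child processes, function plugins run inline in the parent process.
import subprocess
import sys


class HelloApp:
    """Hypothetical gds_app-style plugin: provides a process invocation to launch."""
    def get_name(self):
        return "HelloApp"

    def get_process_invocation(self):
        return [sys.executable, "-c", "print('hello from plugin app')"]


class HelloFunction:
    """Hypothetical gds_function-style plugin: runs inline via run()."""
    def run(self):
        print("hello from plugin function")


def launch_plugin(plugin):
    # Mirrors the get_name fallback pattern in the hunk above
    name = getattr(plugin, "get_name", lambda: plugin.__class__.__name__)()
    print(f"launching {name}")
    return subprocess.Popen(plugin.get_process_invocation())


app_classes = [HelloApp]            # stand-in for get_feature_classes("gds_app")
function_classes = [HelloFunction]  # stand-in for get_feature_classes("gds_function")

procs = [launch_plugin(cls()) for cls in app_classes]
_ = [instance().run() for instance in function_classes]
for proc in procs:
    proc.wait()
```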
fprime_gds/flask/app.py
CHANGED
@@ -84,6 +84,7 @@ def construct_app():
             pipeline.dictionaries.command_name,
             pipeline.dictionaries.project_version,
             pipeline.dictionaries.framework_version,
+            pipeline.dictionaries.metadata,
         ],
     )
     api.add_resource(
@@ -103,6 +104,7 @@ def construct_app():
             pipeline.dictionaries.event_id,
             pipeline.dictionaries.project_version,
             pipeline.dictionaries.framework_version,
+            pipeline.dictionaries.metadata,
         ],
     )
     api.add_resource(
@@ -117,6 +119,7 @@ def construct_app():
             pipeline.dictionaries.channel_id,
             pipeline.dictionaries.project_version,
             pipeline.dictionaries.framework_version,
+            pipeline.dictionaries.metadata,
         ],
     )
     api.add_resource(
fprime_gds/flask/resource.py
CHANGED
@@ -17,17 +17,19 @@ class DictionaryResource(Resource):
     should be flask compatible. Errors with the dictionary are a 500 server error and may not be recovered.
     """

-    def __init__(self, dictionary, project_version, framework_version):
+    def __init__(self, dictionary, project_version, framework_version, metadata):
         """Constructor used to setup for dictionary

         Args:
             dictionary: dictionary to serve when GET is called
             project_version: project version for the dictionary
             framework_version: project version for the dictionary
+            metadata (dict): additional metadata to serve with the dictionary
         """
         self.dictionary = dictionary
         self.project_version = project_version
         self.framework_version = framework_version
+        self.metadata = metadata

     def get(self):
         """HTTP GET method handler for dictionary resource
@@ -38,7 +40,8 @@ class DictionaryResource(Resource):
         return {
             "dictionary": self.dictionary,
             "project_version": self.project_version,
-            "framework_version": self.framework_version
+            "framework_version": self.framework_version,
+            "metadata": self.metadata
         }


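Taken together with the flask/app.py hunks, the dictionary endpoint's GET payload now carries a metadata block alongside the dictionary and version strings. The sketch below shows how constructor arguments of this kind reach a flask_restful Resource via resource_class_args; the route and values are examples, not the GDS routes, and the resource is a trimmed stand-in rather than the real class.

```python
# Sketch (not the GDS app): flask_restful forwards resource_class_args to the
# resource constructor, so extending that list is enough to serve the new field.
from flask import Flask
from flask_restful import Api, Resource


class DictionaryResource(Resource):
    def __init__(self, dictionary, project_version, framework_version, metadata):
        self.dictionary = dictionary
        self.project_version = project_version
        self.framework_version = framework_version
        self.metadata = metadata

    def get(self):
        return {
            "dictionary": self.dictionary,
            "project_version": self.project_version,
            "framework_version": self.framework_version,
            "metadata": self.metadata,
        }


app = Flask(__name__)
api = Api(app)
api.add_resource(
    DictionaryResource,
    "/dictionary/example",  # example route only
    resource_class_args=[{}, "1.0.0", "4.0.0", {"deployment": "Ref"}],
)

with app.test_client() as client:
    print(client.get("/dictionary/example").get_json()["metadata"])
```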