Additional fixes to packet telemetry script and packet contents
timcanham committed Jul 31, 2021
1 parent f4448fd commit 6c5bfc0
Showing 2 changed files with 106 additions and 45 deletions.
80 changes: 38 additions & 42 deletions Autocoders/Python/bin/tlm_packet_gen.py
@@ -125,6 +125,11 @@ class TlmPacketParser(object):
def __init__(self, verbose = False, dependency = None):
self.verbose = verbose
self.dependency = dependency
self.size_dict = dict()

def add_type_size(self, type, size):
PRINT.debug("Type: %s size: %d"%(type,size))
self.size_dict[type] = size

def get_type_size(self,type_name, size):

@@ -198,15 +203,14 @@ def generate_channel_size_dict(self, the_parsed_topology_xml, xml_filename):

xml_list = []
for parsed_xml_type in parsed_xml_dict:
if parsed_xml_dict[parsed_xml_type] == None:
if parsed_xml_dict[parsed_xml_type] is None:
print("ERROR: XML of type {} is being used, but has not been parsed correctly. Check if file exists or add xml file with the 'import_component_type' tag to the Topology file.".format(parsed_xml_type))
raise Exception()
xml_list.append(parsed_xml_dict[parsed_xml_type])

topology_model.set_instance_xml_list(xml_list)

ch_size_dict = dict()
serializable_size_dict = dict()

for comp in the_parsed_topology_xml.get_instances():
comp_name = comp.get_name()
@@ -216,12 +220,12 @@ def generate_channel_size_dict(self, the_parsed_topology_xml, xml_filename):
PRINT.debug("Processing %s"%comp_name)

# check for included XML types
serializable_size_dict.update(self.process_enum_files(parsed_xml_dict[comp_type].get_enum_type_files()))
serializable_size_dict.update(self.process_array_files(parsed_xml_dict[comp_type].get_array_type_files()))
serializable_size_dict.update(self.process_serializable_files(parsed_xml_dict[comp_type].get_serializable_type_files()))
self.process_enum_files(parsed_xml_dict[comp_type].get_enum_type_files())
self.process_array_files(parsed_xml_dict[comp_type].get_array_type_files())
self.process_serializable_files(parsed_xml_dict[comp_type].get_serializable_type_files())

# check for channels
if (parsed_xml_dict[comp_type].get_channels() != None):
if (parsed_xml_dict[comp_type].get_channels() is not None):
for chan in parsed_xml_dict[comp_type].get_channels():
channel_name = comp_name + "." + chan.get_name()
if self.verbose:
@@ -231,11 +235,11 @@ def generate_channel_size_dict(self, the_parsed_topology_xml, xml_filename):
if type(chan_type) == type(tuple()):
chan_size = 4
# if channel is serializable
elif chan_type in serializable_size_dict:
chan_size = serializable_size_dict[chan_type]
elif chan_type in self.size_dict:
chan_size = self.size_dict[chan_type]
else:
chan_size = self.get_type_size(chan_type,chan.get_size())
if chan_size == None:
if chan_size is None:
print("Component %s channel %s type \"%s\" not found!"%(comp_name,channel_name,chan_type))
sys.exit(-1)
chan_id = int(chan.get_ids()[0],0) + comp_id;
@@ -301,15 +305,15 @@ def gen_packet_file(self, xml_filename):
# read in topology file
if entry.tag == "import_topology":
top_file = search_for_file("Packet",entry.text)
if top_file == None:
if top_file is None:
raise TlmPacketParseIOError("import file %s not found"%entry.text)
the_parsed_topology_xml = XmlTopologyParser.XmlTopologyParser(top_file)
deployment = the_parsed_topology_xml.get_deployment()
if self.verbose:
print("Found assembly or deployment named: %s\n" % deployment)
channel_size_dict = self.generate_channel_size_dict(the_parsed_topology_xml, xml_filename)
elif entry.tag == "packet":
if channel_size_dict == None:
if channel_size_dict is None:
raise TlmPacketParseValueError("%s: Topology import must be before packet definitions"%xml_filename)
packet_size = 0
packet_name = entry.attrib['name']
@@ -340,8 +344,7 @@ def gen_packet_file(self, xml_filename):
packet_size += (11 + 2 + 4) # raw packet size + time tag + packet id + packet descriptor
if (packet_size > max_size):
raise TlmPacketParseValueError("Packet %s is too large. Size: %d max: %d"%(packet_name,packet_size,max_size))
if self.verbose:
print("Packet %s size %d"%(packet_name,packet_size))
print("Packet %s size %d/%d"%(packet_name,packet_size,max_size))
total_packet_size += packet_size

if packet_level in size_dict:
Expand All @@ -354,7 +357,7 @@ def gen_packet_file(self, xml_filename):
vfd.close()

elif entry.tag == "ignore":
if channel_size_dict == None:
if channel_size_dict is None:
raise TlmPacketParseValueError("%s: Topology import must be before packet definitions"%xml_filename)
for channel in entry:
channel_name = channel.attrib['name']
@@ -386,8 +389,7 @@ def gen_packet_file(self, xml_filename):
missing_channels = True

if missing_channels:
pass
#raise TlmPacketParseValueError("Channels missing from packets")
raise TlmPacketParseValueError("Channels missing from packets")

header = "%sAc.hpp"%output_file_base
source = "%sAc.cpp"%output_file_base
@@ -408,71 +410,65 @@ def gen_packet_file(self, xml_filename):
source_target = target_directory + os.sep + source

# write dependency file
if self.dependency != None:
if self.dependency is not None:
dependency_file_txt = "\n%s %s: %s\n"%(source_target, header_target, top_file)
open(self.dependency,'w').write(dependency_file_txt)

def process_serializable_files(self, serializable_file_list):
serializable_size_dict = dict()
for serializable_file in serializable_file_list:
serializable_file = search_for_file("Serializable", serializable_file)
serializable_model = XmlSerializeParser.XmlSerializeParser(serializable_file)
# process XML includes
serializable_size_dict.update(self.process_enum_files(serializable_model.get_include_enums()))
serializable_size_dict.update(self.process_array_files(serializable_model.get_include_arrays()))
serializable_size_dict.update(self.process_serializable_files(serializable_model.get_includes()))
self.process_enum_files(serializable_model.get_include_enums())
self.process_array_files(serializable_model.get_include_arrays())
self.process_serializable_files(serializable_model.get_includes())
serializable_type = serializable_model.get_namespace() + "::" + serializable_model.get_name()
serializable_size = 0
for (member_name, member_type, member_size, member_format_specifier, member_comment, _) in serializable_model.get_members():
# if enumeration
if type(member_type) == type(tuple()):
type_size = 4 # Fixme: can we put this in a constant somewhere?
elif member_type in serializable_size_dict.keys(): # See if it is a registered type
type_size = serializable_size_dict[member_type]
elif member_type in self.size_dict.keys(): # See if it is a registered type
type_size = self.size_dict[member_type]
else:
type_size = self.get_type_size(member_type,member_size)
if (type_size == None):
if (type_size is None):
print("Illegal type %s in serializable %s"%(member_type,serializable_type))
sys.exit(-1)
serializable_size += type_size
serializable_size_dict[serializable_type] = serializable_size
self.add_type_size(serializable_type,serializable_size)
if self.verbose:
print("Serializable %s size %d"%(serializable_type,serializable_size))
return serializable_size_dict

def process_enum_files(self, enum_file_list):
enum_dict = dict()
for enum_file in enum_file_list:
enum_file = search_for_file("Enumeration", enum_file)
enum_model = XmlEnumParser.XmlEnumParser(enum_file)
enum_type = enum_model.get_namespace() + "::" + enum_model.get_name()
enum_dict[enum_type] = 4 # Fixme: can we put this in a constant somewhere?
return enum_dict
self.add_type_size(enum_type,4) # Fixme: can we put this in a constant somewhere?

def process_array_files(self, array_file_list):
array_dict = dict()
for array_file in array_file_list:
array_file = search_for_file("Array", array_file)
array_model = XmlArrayParser.XmlArrayParser(array_file)
# process any XML includes
array_dict.update(self.process_enum_files(array_model.get_include_enum_files()))
array_dict.update(self.process_array_files(array_model.get_include_array_files()))
array_dict.update(self.process_serializable_files(array_model.get_includes()))
self.process_enum_files(array_model.get_include_enum_files())
self.process_array_files(array_model.get_include_array_files())
self.process_serializable_files(array_model.get_includes())
array_type = array_model.get_namespace() + "::" + array_model.get_name()
array_size = array_model.get_size()
array_size = int(array_model.get_size())
elem_type = array_model.get_type()
type_size = None
elem_type_size = None
if type(elem_type) == type(tuple()):
type_size = 4 # Fixme: can we put this in a constant somewhere?
elif elem_type in array_dict.keys(): # See if it is a registered type
type_size = array_dict[elem_type]
elem_type_size = 4 # Fixme: can we put this in a constant somewhere?
elif elem_type in self.size_dict.keys(): # See if it is a registered type
elem_type_size = self.size_dict[elem_type]
else:
type_size = self.get_type_size(elem_type,1)
if (type_size == None):
elem_type_size = self.get_type_size(elem_type,1) # Fixme: strings?
if (elem_type_size is None):
print("Illegal type %s in array %s"%(elem_type,array_type))
sys.exit(-1)
array_dict[array_type] = 4 # Fixme: can we put this in a constant somewhere?
return array_dict
self.add_type_size(array_type,elem_type_size*array_size)


def pinit():
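The Python changes above replace the per-call serializable_size_dict/enum_dict/array_dict return values with a single self.size_dict that every processed enum, array, and serializable registers into through add_type_size. Below is a minimal standalone sketch of that pattern, an illustration only and not the actual tool: the primitive-size table and the example type names are assumptions, and the real script resolves unknown types through its own get_type_size and the parsed XML models.

# Minimal sketch (assumed names, not the real tool): the commit's pattern of
# registering every computed type size in one shared dictionary via
# add_type_size(), then consulting that dictionary when sizing channels,
# serializable members, and array elements.

PRIMITIVE_SIZES = {
    # assumed primitive byte sizes, for illustration only
    "U8": 1, "I8": 1, "U16": 2, "I16": 2,
    "U32": 4, "I32": 4, "F32": 4,
    "U64": 8, "I64": 8, "F64": 8,
}

class SizeRegistry:
    def __init__(self):
        self.size_dict = dict()

    def add_type_size(self, type_name, size):
        # mirrors TlmPacketParser.add_type_size in the diff
        self.size_dict[type_name] = size

    def get_type_size(self, type_name):
        # registered enums/arrays/serializables win; otherwise fall back to
        # primitives (a stand-in for the script's get_type_size lookup)
        if type_name in self.size_dict:
            return self.size_dict[type_name]
        return PRIMITIVE_SIZES.get(type_name)

if __name__ == "__main__":
    reg = SizeRegistry()
    reg.add_type_size("Ref::SomeEnum", 4)        # hypothetical enum type
    reg.add_type_size("Ref::SomeArray", 4 * 10)  # hypothetical 10-element U32 array
    print(reg.get_type_size("Ref::SomeArray"))   # 40
    print(reg.get_type_size("U16"))              # 2
    print(reg.get_type_size("Unknown::Type"))    # None -> caller reports an error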
71 changes: 68 additions & 3 deletions Ref/Top/RefPacketsAi.xml
@@ -1,4 +1,4 @@
<packets name="RefPackets" namespace="Ref" size="100">
<packets name="RefPackets" namespace="Ref" size="111">

<import_topology>Ref/Top/RefTopologyAppAi.xml</import_topology>

@@ -15,8 +15,11 @@
<channel name="fileUplink.PacketsReceived"/>
<channel name="fileUplinkBufferManager.TotalBuffs"/>
<channel name="fileUplinkBufferManager.CurrBuffs"/>
<channel name="fileUplinkBufferManager.HiBuffs"/>
<channel name="fileDownlink.FilesSent"/>
<channel name="fileDownlink.PacketsSent"/>
<channel name="pktTlm.TPK_SendLevel"/>
<channel name="fileManager.CommandsExecuted"/>
</packet>

<packet name="CDHErrors" id="2" level="1">
@@ -27,9 +30,13 @@
<channel name="fileUplink.Warnings"/>
<channel name="fileDownlink.Warnings"/>
<channel name="health.PingLateWarnings"/>
<channel name="fileManager.Errors"/>
<channel name="fileUplinkBufferManager.NoBuffs"/>
<channel name="fileUplinkBufferManager.EmptyBuffs"/>
<channel name="fileManager.Errors"/>
</packet>

<packet name="RefPkt" id="3" level="1">
<packet name="DriveTlm" id="3" level="1">
<channel name="pingRcvr.PR_NumPings"/>
<channel name="sendBuffComp.PacketsSent"/>
<channel name="sendBuffComp.NumErrorsInjected"/>
@@ -41,12 +48,70 @@
<channel name="recvBuffComp.Sensor2"/>
<channel name="recvBuffComp.Parameter1"/>
<channel name="recvBuffComp.Parameter2"/>
<channel name="blockDrv.BD_Cycles"/>
</packet>

<packet name="SigGenSum" id="4" level="1">
<channel name="SG1.Output"/>
<channel name="SG1.Type"/>
<channel name="SG2.Output"/>
<channel name="SG2.Type"/>
<channel name="SG3.Output"/>
<channel name="SG3.Type"/>
<channel name="SG4.Output"/>
<channel name="SG4.Type"/>
<channel name="SG5.Output"/>
<channel name="blockDrv.BD_Cycles"/>
<channel name="SG5.Type"/>
</packet>

<packet name="SigGen1Info" id="5" level="2">
<channel name="SG1.Info"/>
</packet>

<packet name="SigGen2Info" id="6" level="2">
<channel name="SG2.Info"/>
</packet>

<packet name="SigGen3Info" id="7" level="2">
<channel name="SG3.Info"/>
</packet>

<packet name="SigGen4Info" id="8" level="2">
<channel name="SG4.Info"/>
</packet>

<packet name="SigGen5Info" id="9" level="2">
<channel name="SG5.Info"/>
</packet>

<packet name="SigGen1" id="10" level="3">
<channel name="SG1.PairOutput"/>
<channel name="SG1.History"/>
<channel name="SG1.PairHistory"/>
</packet>

<packet name="SigGen2" id="11" level="3">
<channel name="SG2.PairOutput"/>
<channel name="SG2.History"/>
<channel name="SG2.PairHistory"/>
</packet>

<packet name="SigGen3" id="12" level="3">
<channel name="SG3.PairOutput"/>
<channel name="SG3.History"/>
<channel name="SG3.PairHistory"/>
</packet>

<packet name="SigGen4" id="13" level="3">
<channel name="SG4.PairOutput"/>
<channel name="SG4.History"/>
<channel name="SG4.PairHistory"/>
</packet>

<packet name="SigGen5" id="14" level="3">
<channel name="SG5.PairOutput"/>
<channel name="SG5.History"/>
<channel name="SG5.PairHistory"/>
</packet>

<!-- Ignored packets -->
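For reference, the size="111" attribute above is the budget the script checks each packet against: gen_packet_file sums the channel sizes and adds the fixed (11 + 2 + 4) overhead noted in its comment, then raises if the total exceeds the maximum. A small sketch of that check follows; the packet name comes from the diff, but the helper and the channel byte sizes are made-up placeholders.

# Minimal sketch of the packet size check (assumed helper, illustrative values):
# total = sum of channel sizes + (11 + 2 + 4) overhead, compared against the
# <packets size="..."> budget (111 in the updated RefPacketsAi.xml).

def check_packet(packet_name, channel_sizes, max_size):
    packet_size = sum(channel_sizes) + (11 + 2 + 4)  # overhead per the script's comment
    if packet_size > max_size:
        raise ValueError("Packet %s is too large. Size: %d max: %d"
                         % (packet_name, packet_size, max_size))
    print("Packet %s size %d/%d" % (packet_name, packet_size, max_size))

# hypothetical channel sizes for the DriveTlm packet shown above
check_packet("DriveTlm", [4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 4], 111)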
