Skip to content

Commit

Permalink
Add multi-plugin test and fix missing redis req
Browse files Browse the repository at this point in the history
  • Loading branch information
cfirth-nasa committed Aug 31, 2023
1 parent 73da159 commit 64fa59a
Show file tree
Hide file tree
Showing 9 changed files with 184 additions and 11 deletions.
42 changes: 35 additions & 7 deletions external_plugins_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,26 +3,54 @@
sys.path.append('OnAIR')

import pytest
from abc import ABC, abstractmethod
from onair.src.run_scripts.execution_engine import ExecutionEngine
from medos_plugin.medos_plugin import Plugin
from generic_plugin import AIPlugIn
from driver import init_global_paths


import numpy as np

def test_single_plugin_defined_in_config_is_imported_by_ddl_class(mocker):
    """Verify that a single plugin named in the config is loaded as a Plugin construct.

    Plugins are initialized as part of ExecutionEngine init, so constructing
    the engine is itself the action under test.
    """
    # Arrange
    # NOTE(review): a dummy data frame is currently hardcoded in
    # medos_plugin.py; a way to pass one in should eventually be added.
    config_file = 'onair/config/test_single_plugin.ini'
    init_global_paths()  # Fix error on execution engine init where internal required paths unavailable
    fake_run_name = ''

    # Action
    test_engine = ExecutionEngine(config_file=config_file, run_name=fake_run_name, save_flag=False)

    constructs = test_engine.sim.agent.learning_systems.ai_constructs

    # Assert: every loaded construct is an instance of the imported Plugin class
    assert all(type(construct) == Plugin for construct in constructs)

    # Assert: each construct exposes the functions mandated by the OnAIR
    # plugin architecture. (The previous version asserted bare generator
    # expressions, which are always truthy and therefore checked nothing.)
    for required_attr in ('__init__', 'apriori_training', 'update', 'render_reasoning'):
        assert all(callable(getattr(construct, required_attr, None)) for construct in constructs)

def test_list_of_plugins_defined_in_config_is_imported_by_ddl_class(mocker):
    """Verify that every plugin in a multi-plugin config is loaded with the
    required OnAIR interface.

    Plugins are initialized as part of ExecutionEngine init, so constructing
    the engine is itself the action under test.
    """
    # Arrange
    config_file = 'onair/config/test_multi_plugin.ini'
    init_global_paths()  # Fix error on execution engine init where internal required paths unavailable
    fake_run_name = ''

    # Action
    test_engine = ExecutionEngine(config_file=config_file, run_name=fake_run_name, save_flag=False)

    constructs = test_engine.sim.agent.learning_systems.ai_constructs

    # Assert: the config lists more than one plugin, so more than one
    # construct must have been imported.
    assert len(constructs) > 1

    # Assert: each construct exposes the functions mandated by the OnAIR
    # plugin architecture. (The previous version asserted bare generator
    # expressions, which are always truthy and therefore checked nothing.)
    for required_attr in ('__init__', 'apriori_training', 'update', 'render_reasoning'):
        assert all(callable(getattr(construct, required_attr, None)) for construct in constructs)
2 changes: 1 addition & 1 deletion generic_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ def update(self, frame=[]):
raise NotImplementedError

@abstractmethod
def render_diagnosis(self):
def render_reasoning(self):
"""
System should return its diagnosis
"""
Expand Down
1 change: 1 addition & 0 deletions kalman_plugin/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
from .kalman_plugin import Plugin
124 changes: 124 additions & 0 deletions kalman_plugin/kalman_plugin.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
# GSC-19165-1, "The On-Board Artificial Intelligence Research (OnAIR) Platform"
#
# Copyright © 2023 United States Government as represented by the Administrator of
# the National Aeronautics and Space Administration. No copyright is claimed in the
# United States under Title 17, U.S. Code. All Other Rights Reserved.
#
# Licensed under the NASA Open Source Agreement version 1.3
# See "NOSA GSC-19165-1 OnAIR.pdf"

import simdkalman
import numpy as np
from generic_plugin import AIPlugIn

class Plugin(AIPlugIn):
    """Kalman-filter-based OnAIR plugin.

    Keeps a sliding window of recent values per telemetry attribute and flags
    attributes whose observed values diverge from their Kalman predictions.
    """

    def __init__(self, name, headers, window_size=3):
        """
        :param name: (str) name of the monitored component
        :param headers: (list of str) attribute names, one per data channel
        :param window_size: (int) size of the time window to examine per attribute
        """
        super().__init__(name, headers)
        self.frames = []  # per-attribute sliding windows of recent raw values
        self.component_name = name
        self.headers = headers
        self.window_size = window_size

        # One shared constant-velocity Kalman filter applied to every attribute.
        self.kf = simdkalman.KalmanFilter(
            state_transition = [[1,1],[0,1]], # matrix A
            process_noise = np.diag([0.1, 0.01]), # Q
            observation_model = np.array([[1,0]]), # H
            observation_noise = 1.0) # R

    #### START: Classes mandated by plugin architecture
    def apriori_training(self):
        """No offline training is required for the Kalman plugin."""
        pass

    def update(self, frame):
        """
        Append one new data point per attribute to the sliding windows.

        :param frame: (list of floats) input sequence of len (input_dim)
        :return: None
        """
        for data_point_index in range(len(frame)):
            if len(self.frames) < len(frame):
                # First frame(s): seed each attribute's window, wrapping each
                # point in a list so data is grouped per attribute before
                # being passed to the Kalman filter.
                # Ex: [[1:00, 1:01, 1:02, 1:03, 1:04, 1:05], [1, 2, 3, 4, 5]]
                self.frames.append([frame[data_point_index]])
            else:
                self.frames[data_point_index].append(frame[data_point_index])
                # Keep at most window_size points per attribute.
                if len(self.frames[data_point_index]) > self.window_size:
                    self.frames[data_point_index].pop(0)

    def render_reasoning(self):
        """
        Return the plugin's diagnosis: the list of attribute names whose
        recent values diverge from their Kalman predictions.
        """
        broken_attributes = self.frame_diagnosis(self.frames, self.headers)
        return broken_attributes
    #### END: Classes mandated by plugin architecture

    def mean(self, values):
        """Return the arithmetic mean of *values* (raises on an empty list)."""
        return sum(values)/len(values)

    def residual(self, predicted, actual):
        """Return the absolute residual between *actual* and *predicted*."""
        return abs(float(actual) - float(predicted))

    def std_dev(self, data):
        """Return the standard deviation of *data*."""
        return np.std(data)

    def predict(self, data, forward_steps, inital_val = None):
        """
        Smooth *data* and predict *forward_steps* ahead with the Kalman filter.

        :param data: (list of numbers) observation sequence for one attribute
        :param forward_steps: (int) number of prediction "steps" to make
        :param inital_val: (float, optional) initial state value for smoothing
        :return: simdkalman prediction result
        """
        # Work on a float copy so the caller's list (e.g. an entry of
        # self.frames) is not mutated — the original converted in place.
        data = [float(value) for value in data]
        if inital_val is not None:
            # Smooth along the supplied initial value.
            smoothed = self.kf.smooth(data, initial_value=[float(inital_val), 0])
        else:
            smoothed = self.kf.smooth(data)
        # NOTE(review): the smoothed result is unused; predict operates on the
        # raw data. Kept as-is to preserve behavior — confirm whether the
        # prediction was meant to use the smoothed series instead.
        predicted = self.kf.predict(data, forward_steps)
        return predicted

    def predictions_for_given_data(self, data):
        """Return the one-step-ahead predicted means for each prefix of *data*."""
        returned_data = []
        initial_val = data[0]
        for item in range(len(data)-1):
            predicted = self.predict(data[0:item+1], 1, initial_val)
            returned_data.append(predicted.observations.mean)
        if len(returned_data) == 0:  # Not enough data: default to a single 0
            returned_data.append(0)
        return returned_data

    def generate_residuals_for_given_data(self, data):
        """Make one-step-ahead predictions over *data* and return their
        absolute residual errors against the actual next values."""
        residuals = []
        initial_val = data[0]
        for item in range(len(data)-1):
            predicted = self.predict(data[0:item+1], 1, initial_val)
            actual_next_state = data[item+1]
            pred_mean = predicted.observations.mean
            residual_error = float(self.residual(pred_mean, actual_next_state))
            residuals.append(residual_error)
        if len(residuals) == 0:  # Data of length 1 yields no residuals; use [0]
            residuals.append(0)
        return residuals

    def current_attribute_chunk_get_error(self, data):
        """Return True when the mean prediction residual over *data* is large
        enough (>= 1.5) to consider the attribute broken."""
        residuals = self.generate_residuals_for_given_data(data)
        # residuals are already absolute values, so their mean is >= 0;
        # the original wrapped it in two redundant abs() calls.
        mean_residuals = self.mean(residuals)
        return mean_residuals >= 1.5

    def frame_diagnosis(self, frame, headers):
        """
        Return the names of attributes whose windows show large prediction
        errors, excluding the TIME attribute.

        :param frame: (list of lists) per-attribute sliding windows
        :param headers: (list of str) attribute names parallel to *frame*
        :return: (list of str) names of broken attributes
        """
        kal_broken_attributes = []
        for attribute_index in range(len(frame)):
            error = self.current_attribute_chunk_get_error(frame[attribute_index])
            if error and headers[attribute_index].upper() != 'TIME':
                kal_broken_attributes.append(headers[attribute_index])
        return kal_broken_attributes
2 changes: 1 addition & 1 deletion medos_plugin/medos_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ def update(self, frame=[]): # send MEDOS new datapoint
reconstruct = dict(zip(self.headers[1:], frame))
self.drone_state_dict = reconstruct

def render_diagnosis(self): # return event detections
def render_reasoning(self): # return event detections
"""
System should return its diagnosis
"""
Expand Down
18 changes: 18 additions & 0 deletions onair/config/test_multi_plugin.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
[DEFAULT]
TelemetryDataFilePath = onair/data/raw_telemetry_data/
TelemetryMetadataFilePath = onair/data/telemetry_configs/
MetaFiles = ['namaste_TLM_CONFIG.json']
TelemetryFiles = ['namaste_TLM.txt']
ParserFileName = forty_two_parser
ParserName = FortyTwo
SimName = CSV
PluginList = ['medos','kalman']

[RUN_FLAGS]
IO_Flag = true
Dev_Flag = false
SBN_Flag = false
Viz_Flag = false
Redis_Flag = true


Original file line number Diff line number Diff line change
Expand Up @@ -15,3 +15,4 @@ SBN_Flag = false
Viz_Flag = false
Redis_Flag = true


2 changes: 1 addition & 1 deletion onair/src/data_driven_components/data_driven_learning.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def __init__(self, headers, _ai_plugins:list=[]):
assert(len(headers)>0)
self.headers = headers
self.ai_constructs = [
importlib.import_module('onair.src.data_driven_components.' + plugin_name + '.' + f'{plugin_name}_plugin').Plugin(plugin_name, headers) for plugin_name in _ai_plugins
importlib.import_module(f'{plugin_name}_plugin').Plugin(f'{plugin_name}_plugin', headers) for plugin_name in _ai_plugins
]

def update(self, curr_data, status):
Expand Down
3 changes: 2 additions & 1 deletion requirements_pip.txt
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,5 @@ orjson==3.8.8
pandas==1.5.1
pytest==7.2.0
pytest-mock==3.10.0
pytest-randomly==3.12.0
pytest-randomly==3.12.0
redis==3.5.3

0 comments on commit 64fa59a

Please sign in to comment.