From 720b650f8effa7b397db70bd4b9aa8d5952b2fb0 Mon Sep 17 00:00:00 2001 From: Vivek Reddy Date: Mon, 30 Aug 2021 15:23:12 -0700 Subject: [PATCH] [debug dump util] Module implementation Logic and Port Module (#1667) * Port Module and UT Added. What I did: HLD for Dump Utility: HLD. For more info on adding new modules, check this section in the HLD: MatchInfra. Signed-off-by: Vivek Reddy Karri --- dump/match_infra.py | 190 +++++++++++++-------- dump/plugins/__init__.py | 15 ++ dump/plugins/executor.py | 26 +++ dump/plugins/port.py | 76 +++++++++ tests/dump_input/port/appl_db.json | 35 ++++ tests/dump_input/port/asic_db.json | 29 ++++ tests/dump_input/port/config_db.json | 30 ++++ tests/dump_input/port/state_db.json | 14 ++ tests/dump_tests/match_engine_test.py | 100 +++++------ tests/dump_tests/module_tests/__init__.py | 0 tests/dump_tests/module_tests/port_test.py | 162 ++++++++++++++++++ 11 files changed, 556 insertions(+), 121 deletions(-) create mode 100644 dump/plugins/__init__.py create mode 100644 dump/plugins/executor.py create mode 100644 dump/plugins/port.py create mode 100644 tests/dump_input/port/appl_db.json create mode 100644 tests/dump_input/port/asic_db.json create mode 100644 tests/dump_input/port/config_db.json create mode 100644 tests/dump_input/port/state_db.json create mode 100644 tests/dump_tests/module_tests/__init__.py create mode 100644 tests/dump_tests/module_tests/port_test.py diff --git a/dump/match_infra.py b/dump/match_infra.py index fe0d8126a948..0b4ca0490a39 100644 --- a/dump/match_infra.py +++ b/dump/match_infra.py @@ -1,4 +1,5 @@ -import json, fnmatch +import json +import fnmatch from abc import ABC, abstractmethod from dump.helper import verbose_print from swsscommon.swsscommon import SonicV2Connector, SonicDBConfig @@ -12,9 +13,9 @@ "NO_SRC": "Either one of db or file in the request should be non-empty", "NO_TABLE": "No 'table' name provided", "NO_KEY": "'key_pattern' cannot be empty", - "NO_VALUE" : "Field is provided, but no value is provided to compare with", + "NO_VALUE": "Field is provided, but no value is provided to compare with", "SRC_VAGUE": "Only one of db or file should be provided", - "CONN_ERR" : "Connection Error", + "CONN_ERR": "Connection Error", "JUST_KEYS_COMPAT": "When Just_keys is set to False, return_fields should be empty", "BAD_FORMAT_RE_FIELDS": "Return Fields should be of list type", "NO_ENTRIES": "No Keys found after applying the filtering criteria", @@ -22,14 +23,15 @@ "INV_NS": "Namespace is invalid" } + class MatchRequest: - """ - Request Object which should be passed to the MatchEngine - + """ + Request Object which should be passed to the MatchEngine + Attributes: "table" : A Valid Table Name "key_pattern" : Pattern of the redis-key to match. Defaults to "*". Eg: "*" will match all the keys. - Supports these glob style patterns. https://redis.io/commands/KEYS + Supports these glob style patterns. https://redis.io/commands/KEYS "field" : Field to check for a match. Defaults to None "value" : Value to match, Defaults to None "return_fields" : An iterable type, where each element would imply a field to return from all the filtered keys "db" : A Valid DB name, Defaults to None. "file" : A Valid Config JSON file, Defaults to None. Eg: copp_cfg.json Only one of the db/file fields should have a non-empty string. "just_keys" : If true, only returns the keys matched. Does not return field-value pairs. Defaults to True "ns" : namespace argument, if nothing is provided, default namespace is used "match_entire_list" : When this arg is set to true, entire list is matched including the ",". 
+ "match_entire_list" : When this arg is set to true, entire list is matched incluing the ",". When False, the values are split based on "," and individual items are matched with """ + def __init__(self, **kwargs): self.table = kwargs["table"] if "table" in kwargs else None self.key_pattern = kwargs["key_pattern"] if "key_pattern" in kwargs else "*" @@ -56,16 +59,15 @@ def __init__(self, **kwargs): verbose_print(str(err)) if err: raise Exception("Static Checks for the MatchRequest Failed, Reason: \n" + err) - - + def __static_checks(self): - + if not self.db and not self.file: return EXCEP_DICT["NO_SRC"] - + if self.db and self.file: return EXCEP_DICT["SRC_VAGUE"] - + if not self.db: try: with open(self.file) as f: @@ -75,32 +77,32 @@ def __static_checks(self): if not self.file and self.db not in SonicDBConfig.getDbList(): return EXCEP_DICT["INV_DB"] - + if not self.table: return EXCEP_DICT["NO_TABLE"] - + if not isinstance(self.return_fields, list): return EXCEP_DICT["BAD_FORMAT_RE_FIELDS"] - + if not self.just_keys and self.return_fields: return EXCEP_DICT["JUST_KEYS_COMPAT"] - + if self.field and not self.value: return EXCEP_DICT["NO_VALUE"] - + if self.ns != DEFAULT_NAMESPACE and self.ns not in multi_asic.get_namespace_list(): return EXCEP_DICT["INV_NS"] + " Choose From {}".format(multi_asic.get_namespace_list()) - + verbose_print("MatchRequest Checks Passed") - + return "" - + def __str__(self): str = "----------------------- \n MatchRequest: \n" if self.db: str += "db:{} , ".format(self.db) if self.file: - str += "file:{} , ".format(self.file) + str += "file:{} , ".format(self.file) if self.table: str += "table:{} , ".format(self.table) if self.key_pattern: @@ -116,78 +118,76 @@ def __str__(self): if len(self.return_fields) > 0: str += "return_fields: " + ",".join(self.return_fields) + " " if self.ns: - str += "namespace: , " + self.ns + str += "namespace: , " + self.ns if self.match_entire_list: str += "match_list: True , " else: str += "match_list: False , " return str - + + class SourceAdapter(ABC): """ Source Adaptor offers unified interface to Data Sources """ - + def __init__(self): pass - + @abstractmethod def connect(self, db, ns): """ Return True for Success, False for failure """ return False - + @abstractmethod def getKeys(self, db, table, key_pattern): return [] - + @abstractmethod def get(self, db, key): return {} - + @abstractmethod def hget(self, db, key, field): return "" - + @abstractmethod def get_separator(self, db): return "" - + + class RedisSource(SourceAdapter): """ Concrete Adaptor Class for connecting to Redis Data Sources """ - - def __init__(self): - self.conn = None - + + def __init__(self, conn_pool): + self.conn = None + self.pool = conn_pool + def connect(self, db, ns): try: - if not SonicDBConfig.isInit(): - if multi_asic.is_multi_asic(): - SonicDBConfig.load_sonic_global_db_config() - else: - SonicDBConfig.load_sonic_db_config() - self.conn = SonicV2Connector(namespace=ns, use_unix_socket_path=True) - self.conn.connect(db) + self.conn = self.pool.get(db, ns) except Exception as e: verbose_print("RedisSource: Connection Failed\n" + str(e)) return False return True - + def get_separator(self, db): return self.conn.get_db_separator(db) - - def getKeys(self, db, table, key_pattern): + + def getKeys(self, db, table, key_pattern): return self.conn.keys(db, table + self.get_separator(db) + key_pattern) - + def get(self, db, key): return self.conn.get_all(db, key) - + def hget(self, db, key, field): return self.conn.get(db, key, field) + class 
JsonSource(SourceAdapter): """ Concrete Adapter Class for connecting to JSON Data Sources """ - + def __init__(self): self.json_data = None - + def connect(self, db, ns): try: with open(db) as f: @@ -196,10 +196,10 @@ def connect(self, db, ns): verbose_print("JsonSource: Loading the JSON file failed" + str(e)) return False return True - + def get_separator(self, db): return SonicDBConfig.getSeparator("CONFIG_DB") - + def getKeys(self, db, table, key_pattern): if table not in self.json_data: return [] @@ -207,48 +207,95 @@ def getKeys(self, db, table, key_pattern): kp = key_pattern.replace("[^", "[!") kys = fnmatch.filter(self.json_data[table].keys(), kp) return [table + self.get_separator(db) + ky for ky in kys] - + def get(self, db, key): sep = self.get_separator(db) table, key = key.split(sep, 1) return self.json_data.get(table, {}).get(key, {}) - + def hget(self, db, key, field): sep = self.get_separator(db) table, key = key.split(sep, 1) return self.json_data.get(table, {}).get(key, {}).get(field, "") - + + +class ConnectionPool: + """ Caches SonicV2Connector objects for effective reuse """ + def __init__(self): + self.cache = dict() # Pool of SonicV2Connector objects + + def initialize_connector(self, ns): + if not SonicDBConfig.isInit(): + if multi_asic.is_multi_asic(): + SonicDBConfig.load_sonic_global_db_config() + else: + SonicDBConfig.load_sonic_db_config() + return SonicV2Connector(namespace=ns, use_unix_socket_path=True) + + def get(self, db_name, ns, update=False): + """ Returns a SonicV2Connector Object and caches it for further requests """ + if ns not in self.cache: + self.cache[ns] = {} + self.cache[ns]["conn"] = self.initialize_connector(ns) + self.cache[ns]["connected_to"] = set() + if update or db_name not in self.cache[ns]["connected_to"]: + self.cache[ns]["conn"].connect(db_name) + self.cache[ns]["connected_to"].add(db_name) + return self.cache[ns]["conn"] + + def clear(self, namespace=None): + if not namespace: + self.cache.clear() + elif namespace in self.cache: + del self.cache[namespace] + + class MatchEngine: - """ Pass in a MatchRequest, to fetch the Matched dump from the Data sources """ - + """ + Provide a MatchRequest to fetch the relevant keys/field-value pairs from the data source + Usage Guidelines: + 1) Instantiate the class once for the entire execution, + to effectively use the caching of redis connection objects + """ + def __init__(self, pool=None): + if not isinstance(pool, ConnectionPool): + self.conn_pool = ConnectionPool() + else: + self.conn_pool = pool + + def clear_cache(self, ns): + self.conn_pool.clear(ns) + def __get_source_adapter(self, req): src = None d_src = "" if req.db: d_src = req.db - src = RedisSource() + src = RedisSource(self.conn_pool) else: d_src = req.file src = JsonSource() return d_src, src - + def __create_template(self): - return {"error" : "", "keys" : [], "return_values" : {}} - + return {"error": "", "keys": [], "return_values": {}} + def __display_error(self, err): template = self.__create_template() template['error'] = err verbose_print("MatchEngine: \n" + template['error']) return template - + def __filter_out_keys(self, src, req, all_matched_keys): # TODO: Custom Callbacks for Complex Matching Criteria if not req.field: return all_matched_keys - + filtered_keys = [] for key in all_matched_keys: f_values = src.hget(req.db, key, req.field) + if not f_values: + continue if "," in f_values and not req.match_entire_list: f_value = f_values.split(",") else: @@ -256,7 +303,7 @@ def __filter_out_keys(self, src, req, all_matched_keys): if 
req.value in f_value: filtered_keys.append(key) return filtered_keys - + def __fill_template(self, src, req, filtered_keys, template): for key in filtered_keys: temp = {} @@ -266,35 +313,34 @@ def __fill_template(self, src, req, filtered_keys, template): elif len(req.return_fields) > 0: template["keys"].append(key) template["return_values"][key] = {} - for field in req.return_fields: + for field in req.return_fields: template["return_values"][key][field] = src.hget(req.db, key, field) else: template["keys"].append(key) verbose_print("Return Values:" + str(template["return_values"])) return template - + def fetch(self, req): """ Given a request obj, find its match in the data source provided """ if not isinstance(req, MatchRequest): return self.__display_error(EXCEP_DICT["INV_REQ"]) - + verbose_print(str(req)) - + if not req.key_pattern: return self.__display_error(EXCEP_DICT["NO_KEY"]) - + d_src, src = self.__get_source_adapter(req) if not src.connect(d_src, req.ns): return self.__display_error(EXCEP_DICT["CONN_ERR"]) - + template = self.__create_template() all_matched_keys = src.getKeys(req.db, req.table, req.key_pattern) if not all_matched_keys: return self.__display_error(EXCEP_DICT["NO_MATCHES"]) - + filtered_keys = self.__filter_out_keys(src, req, all_matched_keys) verbose_print("Filtered Keys:" + str(filtered_keys)) if not filtered_keys: return self.__display_error(EXCEP_DICT["NO_ENTRIES"]) - return self.__fill_template(src, req, filtered_keys, template) - + return self.__fill_template(src, req, filtered_keys, template) \ No newline at end of file diff --git a/dump/plugins/__init__.py b/dump/plugins/__init__.py new file mode 100644 index 000000000000..2141e4fec82b --- /dev/null +++ b/dump/plugins/__init__.py @@ -0,0 +1,15 @@ +import os +import sys +import pkgutil +import importlib +from .executor import Executor + +dump_modules = {} +pkg_dir = os.path.dirname(__file__) + +# import child classes automatically +for (module_loader, name, ispkg) in pkgutil.iter_modules([pkg_dir]): + importlib.import_module('.' 
+ name, __package__) + +# Classes inheriting Executor +dump_modules = {cls.__name__.lower(): cls for cls in Executor.__subclasses__()} diff --git a/dump/plugins/executor.py b/dump/plugins/executor.py new file mode 100644 index 000000000000..73c324de8091 --- /dev/null +++ b/dump/plugins/executor.py @@ -0,0 +1,26 @@ +from abc import ABC, abstractmethod +from dump.match_infra import MatchEngine + + +class Executor(ABC): + """ + Abstract Class which should be extended from in + order to be included in the dump state CLI + """ + + ARG_NAME = "id" # Arg Identifier + CONFIG_FILE = "" # Path to config file, if any + + def __init__(self, match_engine=None): + if not isinstance(match_engine, MatchEngine): + self.match_engine = MatchEngine(None) + else: + self.match_engine = match_engine + + @abstractmethod + def execute(self, params): + pass + + @abstractmethod + def get_all_args(self, ns): + pass diff --git a/dump/plugins/port.py b/dump/plugins/port.py new file mode 100644 index 000000000000..f8422c9c691c --- /dev/null +++ b/dump/plugins/port.py @@ -0,0 +1,76 @@ +from dump.match_infra import MatchRequest +from dump.helper import create_template_dict +from .executor import Executor + + +class Port(Executor): + """ + Debug Dump Plugin for PORT Module + """ + ARG_NAME = "port_name" + + def __init__(self, match_engine=None): + super().__init__(match_engine) + + def get_all_args(self, ns=""): + req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="*", ns=ns) + ret = self.match_engine.fetch(req) + all_ports = ret["keys"] + return [key.split("|")[-1] for key in all_ports] + + def execute(self, params): + self.ret_temp = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"]) + port_name = params[Port.ARG_NAME] + self.ns = params["namespace"] + self.init_port_config_info(port_name) + self.init_port_appl_info(port_name) + port_asic_obj = self.init_asic_hostif_info(port_name) + self.init_asic_port_info(port_asic_obj) + self.init_state_port_info(port_name) + return self.ret_temp + + def add_to_ret_template(self, table, db, keys, err): + if not err and keys: + self.ret_temp[db]["keys"].extend(keys) + return True + else: + self.ret_temp[db]["tables_not_found"].extend([table]) + return False + + def init_port_config_info(self, port_name): + req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern=port_name, ns=self.ns) + ret = self.match_engine.fetch(req) + self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"]) + + def init_port_appl_info(self, port_name): + req = MatchRequest(db="APPL_DB", table="PORT_TABLE", key_pattern=port_name, ns=self.ns) + ret = self.match_engine.fetch(req) + self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"]) + + def init_state_port_info(self, port_name): + req = MatchRequest(db="STATE_DB", table="PORT_TABLE", key_pattern=port_name, ns=self.ns) + ret = self.match_engine.fetch(req) + self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"]) + + def init_asic_hostif_info(self, port_name): + req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", key_pattern="*", field="SAI_HOSTIF_ATTR_NAME", + value=port_name, return_fields=["SAI_HOSTIF_ATTR_OBJ_ID"], ns=self.ns) + ret = self.match_engine.fetch(req) + asic_port_obj_id = "" + + if not ret["error"] and len(ret["keys"]) != 0: + self.ret_temp[req.db]["keys"] = ret["keys"] + sai_hostif_obj_key = ret["keys"][-1] + if sai_hostif_obj_key in ret["return_values"] and "SAI_HOSTIF_ATTR_OBJ_ID" in ret["return_values"][sai_hostif_obj_key]: + 
asic_port_obj_id = ret["return_values"][sai_hostif_obj_key]["SAI_HOSTIF_ATTR_OBJ_ID"] + else: + self.ret_temp[req.db]["tables_not_found"] = [req.table] + return asic_port_obj_id + + def init_asic_port_info(self, asic_port_obj_id): + if not asic_port_obj_id: + self.ret_temp["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT") + return None + req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_PORT", key_pattern=asic_port_obj_id, ns=self.ns) + ret = self.match_engine.fetch(req) + self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"]) diff --git a/tests/dump_input/port/appl_db.json b/tests/dump_input/port/appl_db.json new file mode 100644 index 000000000000..6dca414473dd --- /dev/null +++ b/tests/dump_input/port/appl_db.json @@ -0,0 +1,35 @@ +{ + "PORT_TABLE:Ethernet176": { + "index": "0", + "lanes": "0", + "alias": "etp45", + "speed": "25000", + "oper_status": "up", + "pfc_asym": "off", + "mtu": "9100", + "fec": "rs", + "admin_status": "up" + }, + "PORT_TABLE:Ethernet160": { + "index": "0", + "lanes": "0", + "alias": "etp41", + "speed": "25000", + "oper_status": "up", + "pfc_asym": "off", + "mtu": "9100", + "fec": "rs", + "admin_status": "up" + }, + "PORT_TABLE:Ethernet164": { + "index": "0", + "lanes": "0", + "alias": "etp42", + "speed": "25000", + "oper_status": "up", + "pfc_asym": "off", + "mtu": "9100", + "fec": "rs", + "admin_status": "up" + } +} \ No newline at end of file diff --git a/tests/dump_input/port/asic_db.json b/tests/dump_input/port/asic_db.json new file mode 100644 index 000000000000..db9e86128b7d --- /dev/null +++ b/tests/dump_input/port/asic_db.json @@ -0,0 +1,29 @@ +{ + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a4d":{ + "SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV", + "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x100000000036a", + "SAI_HOSTIF_ATTR_NAME" : "Ethernet176", + "SAI_HOSTIF_ATTR_OPER_STATUS" : "true" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x100000000036a": { + "SAI_PORT_ATTR_ADMIN_STATE" : "true", + "SAI_PORT_ATTR_SPEED" : "25000", + "SAI_PORT_ATTR_MTU" : "9122" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a49":{ + "SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV", + "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000002e6", + "SAI_HOSTIF_ATTR_NAME" : "Ethernet160", + "SAI_HOSTIF_ATTR_OPER_STATUS" : "true" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a4a":{ + "SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV", + "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x1000000000307", + "SAI_HOSTIF_ATTR_OPER_STATUS" : "true" + }, + "ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x1000000000307": { + "SAI_PORT_ATTR_ADMIN_STATE" : "true", + "SAI_PORT_ATTR_SPEED" : "25000", + "SAI_PORT_ATTR_MTU" : "9122" + } +} diff --git a/tests/dump_input/port/config_db.json b/tests/dump_input/port/config_db.json new file mode 100644 index 000000000000..890520f4b1eb --- /dev/null +++ b/tests/dump_input/port/config_db.json @@ -0,0 +1,30 @@ +{ + "PORT|Ethernet176": { + "admin_status" : "up", + "alias": "etp45", + "index": "45", + "lanes": "176", + "speed": "25000" + }, + "PORT|Ethernet164": { + "admin_status" : "up", + "alias": "etp42", + "index": "42", + "lanes": "164", + "speed": "25000" + }, + "PORT|Ethernet160": { + "admin_status" : "up", + "alias": "etp41", + "index": "41", + "lanes": "160", + "speed": "25000" + }, + "PORT|Ethernet156": { + "admin_status" : "up", + "alias": "etp40", + "index": "40", + "lanes": "156", + "speed": "25000" + } +} diff --git a/tests/dump_input/port/state_db.json b/tests/dump_input/port/state_db.json new file 
mode 100644 index 000000000000..9db2d8f8c21c --- /dev/null +++ b/tests/dump_input/port/state_db.json @@ -0,0 +1,14 @@ +{ + "PORT_TABLE|Ethernet176":{ + "state" : "ok", + "netdev_oper_status" : "up" + }, + "PORT_TABLE|Ethernet160":{ + "state" : "ok", + "netdev_oper_status" : "up" + }, + "PORT_TABLE|Ethernet164":{ + "state" : "ok", + "netdev_oper_status" : "up" + } +} diff --git a/tests/dump_tests/match_engine_test.py b/tests/dump_tests/match_engine_test.py index a4d4330b9b4d..5575bc412792 100644 --- a/tests/dump_tests/match_engine_test.py +++ b/tests/dump_tests/match_engine_test.py @@ -1,4 +1,5 @@ -import os, sys +import os +import sys import unittest import pytest from dump.match_infra import MatchEngine, EXCEP_DICT, MatchRequest @@ -10,6 +11,7 @@ sys.path.append(test_path) + @pytest.fixture(scope="module", autouse=True) def mock_setup(): print("SETUP") @@ -20,11 +22,11 @@ def mock_setup(): class TestMatchRequestValidation(unittest.TestCase): - + def __init__(self, *args, **kwargs): super(TestMatchRequestValidation, self).__init__(*args, **kwargs) self.match_engine = MatchEngine() - + def assertRaisesWithMessage(self, msg, func, *args, **kwargs): try: func(*args, **kwargs) @@ -32,111 +34,111 @@ def assertRaisesWithMessage(self, msg, func, *args, **kwargs): except Exception as inst: print(inst) assert msg in str(inst) - + def test_bad_request(self): req = [] ret = self.match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["INV_REQ"] - + def test_no_source(self): self.assertRaisesWithMessage(EXCEP_DICT["NO_SRC"], MatchRequest) - - def test_vague_source(self): + + def test_vague_source(self): self.assertRaisesWithMessage(EXCEP_DICT["SRC_VAGUE"], MatchRequest, db="CONFIG_DB", file="/etc/sonic/copp_cfg.json") - - def test_no_file(self): + + def test_no_file(self): self.assertRaisesWithMessage(EXCEP_DICT["FILE_R_EXEP"], MatchRequest, file=os.path.join(test_path, "random_db.json")) - def test_invalid_db(self): + def test_invalid_db(self): self.assertRaisesWithMessage(EXCEP_DICT["INV_DB"], MatchRequest, db="CONFIGURATION_DB") - + def test_invalid_namespace(self): - self.assertRaisesWithMessage(EXCEP_DICT["INV_NS"], MatchRequest, db="APPL_DB", table="PORT_TABLE", + self.assertRaisesWithMessage(EXCEP_DICT["INV_NS"], MatchRequest, db="APPL_DB", table="PORT_TABLE", field="lanes", value="202", ns="asic4") - + def test_bad_key_pattern(self): req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="") ret = self.match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_KEY"] - + def test_no_value(self): self.assertRaisesWithMessage(EXCEP_DICT["NO_VALUE"], MatchRequest, db="APPL_DB", table="COPP_TABLE", key_pattern="*", field="trap_ids", value="") - + def test_no_table(self): self.assertRaisesWithMessage(EXCEP_DICT["NO_TABLE"], MatchRequest, db="APPL_DB", table="", key_pattern="*", field="trap_ids", value="bgpv6") - + def test_just_keys_return_fields_compat(self): - self.assertRaisesWithMessage(EXCEP_DICT["JUST_KEYS_COMPAT"], MatchRequest, db="APPL_DB", return_fields=["trap_group"], table="COPP_TABLE", + self.assertRaisesWithMessage(EXCEP_DICT["JUST_KEYS_COMPAT"], MatchRequest, db="APPL_DB", return_fields=["trap_group"], table="COPP_TABLE", key_pattern="*", field="trap_ids", value="", just_keys=False) - + def test_invalid_combination(self): req = MatchRequest(db="CONFIG_DB", table="COPP_TRAP", key_pattern="*", field="trap_ids", value="sample_packet") ret = self.match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_MATCHES"] - + def test_return_fields_bad_format(self): 
self.assertRaisesWithMessage(EXCEP_DICT["BAD_FORMAT_RE_FIELDS"], MatchRequest, db="STATE_DB", table="REBOOT_CAUSE", key_pattern="*", return_fields="cause") - + def test_valid_match_request(self): try: req = MatchRequest(db="APPL_DB", table="PORT_TABLE", field="lanes", value="202") except Exception as e: assert False, "Exception Raised for a Valid MatchRequest" + str(e) - + class TestMatchEngine(unittest.TestCase): - + def __init__(self, *args, **kwargs): super(TestMatchEngine, self).__init__(*args, **kwargs) self.match_engine = MatchEngine() - + def test_key_pattern_wildcard(self): req = MatchRequest(db="CONFIG_DB", table="SFLOW_COLLECTOR", key_pattern="*") ret = self.match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 2 assert "SFLOW_COLLECTOR|ser5" in ret['keys'] - assert "SFLOW_COLLECTOR|prod" in ret['keys'] - + assert "SFLOW_COLLECTOR|prod" in ret['keys'] + def test_key_pattern_complex(self): req = MatchRequest(db="CONFIG_DB", table="ACL_RULE", key_pattern="EVERFLOW*") ret = self.match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 2 assert "ACL_RULE|EVERFLOW|RULE_6" in ret['keys'] - assert "ACL_RULE|EVERFLOW|RULE_08" in ret['keys'] - + assert "ACL_RULE|EVERFLOW|RULE_08" in ret['keys'] + def test_field_value_match(self): req = MatchRequest(db="CONFIG_DB", table="ACL_TABLE", field="policy_desc", value="SSH_ONLY") ret = self.match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "ACL_TABLE|SSH_ONLY" in ret['keys'] - + def test_field_value_match_list_type(self): req = MatchRequest(db="APPL_DB", table="PORT_TABLE", field="lanes", value="202") ret = self.match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "PORT_TABLE:Ethernet200" in ret['keys'] - + def test_for_no_match(self): req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_SWITCH", field="SAI_SWITCH_ATTR_SRC_MAC_ADDRESS", value="DE:AD:EE:EE:EE") ret = self.match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_ENTRIES"] assert len(ret["keys"]) == 0 - + def test_for_no_key_match(self): req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_SWITCH", key_pattern="oid:0x22*") ret = self.match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_MATCHES"] - + def test_field_value_no_match(self): req = MatchRequest(db="STATE_DB", table="FAN_INFO", key_pattern="*", field="led_status", value="yellow") ret = self.match_engine.fetch(req) assert ret["error"] == EXCEP_DICT["NO_ENTRIES"] assert len(ret["keys"]) == 0 - + def test_return_keys(self): req = MatchRequest(db="STATE_DB", table="REBOOT_CAUSE", return_fields=["cause"]) ret = self.match_engine.fetch(req) @@ -144,21 +146,21 @@ def test_return_keys(self): assert len(ret["keys"]) == 2 assert "warm-reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_04_53_58"]["cause"] assert "reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_02_33_06"]["cause"] - + def test_return_fields_with_key_filtering(self): req = MatchRequest(db="STATE_DB", table="REBOOT_CAUSE", key_pattern="2020_10_09_02*", return_fields=["cause"]) ret = self.match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "reboot" == ret["return_values"]["REBOOT_CAUSE|2020_10_09_02_33_06"]["cause"] - + def test_return_fields_with_field_value_filtering(self): req = MatchRequest(db="STATE_DB", table="CHASSIS_MODULE_TABLE", field="oper_status", value="Offline", return_fields=["slot"]) ret = self.match_engine.fetch(req) assert ret["error"] == "" assert 
len(ret["keys"]) == 1 assert "18" == ret["return_values"]["CHASSIS_MODULE_TABLE|FABRIC-CARD1"]["slot"] - + def test_return_fields_with_all_filtering(self): req = MatchRequest(db="STATE_DB", table="VXLAN_TUNNEL_TABLE", key_pattern="EVPN_25.25.25.2*", field="operstatus", value="down", return_fields=["src_ip"]) ret = self.match_engine.fetch(req) @@ -167,7 +169,7 @@ def test_return_fields_with_all_filtering(self): assert "1.1.1.1" == ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.25"]["src_ip"] assert "1.1.1.1" == ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.26"]["src_ip"] assert "1.1.1.1" == ret["return_values"]["VXLAN_TUNNEL_TABLE|EVPN_25.25.25.27"]["src_ip"] - + def test_just_keys_false(self): req = MatchRequest(db="CONFIG_DB", table="SFLOW", key_pattern="global", just_keys=False) ret = self.match_engine.fetch(req) @@ -178,15 +180,15 @@ def test_just_keys_false(self): exp_dict = {"SFLOW|global": {"admin_state": "up", "polling_interval": "0"}} ddiff = DeepDiff(exp_dict, recv_dict) assert not ddiff, ddiff - + def test_file_source(self): file = os.path.join(dump_test_input, "copp_cfg.json") req = MatchRequest(file=file, table="COPP_TRAP", field="trap_ids", value="arp_req") ret = self.match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 - assert "COPP_TRAP|arp" in ret["keys"] - + assert "COPP_TRAP|arp" in ret["keys"] + def test_file_source_with_key_ptrn(self): file = os.path.join(dump_test_input, "copp_cfg.json") req = MatchRequest(file=file, table="COPP_GROUP", key_pattern="queue4*", field="red_action", value="drop") @@ -194,7 +196,7 @@ def test_file_source_with_key_ptrn(self): assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "COPP_GROUP|queue4_group2" in ret["keys"] - + def test_file_source_with_not_only_return_keys(self): file = os.path.join(dump_test_input, "copp_cfg.json") req = MatchRequest(file=file, table="COPP_GROUP", key_pattern="queue4*", field="red_action", value="drop", just_keys=False) @@ -205,17 +207,17 @@ def test_file_source_with_not_only_return_keys(self): exp_dict = {"COPP_GROUP|queue4_group2": {"trap_action": "copy", "trap_priority": "4", "queue": "4", "meter_type": "packets", "mode": "sr_tcm", "cir": "600", "cbs": "600", "red_action": "drop"}} ddiff = DeepDiff(exp_dict, recv_dict) assert not ddiff, ddiff - + def test_match_entire_list(self): req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="*", field="lanes", value="61,62,63,64", match_entire_list=True, just_keys=True) ret = self.match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 - assert "PORT|Ethernet60" in ret["keys"] - + assert "PORT|Ethernet60" in ret["keys"] + class TestNonDefaultNameSpace(unittest.TestCase): - + @classmethod def setup_class(cls): print("SETUP") @@ -225,12 +227,12 @@ def setup_class(cls): reload(mock_multi_asic) from ..mock_tables import dbconnector dbconnector.load_namespace_config() - + def teardown_class(cls): print("TEARDOWN") os.environ["UTILITIES_UNIT_TESTING"] = "0" - os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "" - + os.environ["UTILITIES_UNIT_TESTING_TOPOLOGY"] = "" + def test_namespace_asic0(self): req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="*", field="asic_port_name", value="Eth0-ASIC0", ns="asic0") match_engine = MatchEngine() @@ -238,11 +240,11 @@ def test_namespace_asic0(self): assert ret["error"] == "" assert len(ret["keys"]) == 1 assert "PORT|Ethernet0" in ret["keys"] - + def test_namespace_asic1(self): req = MatchRequest(db="CONFIG_DB", table="PORT", 
key_pattern="Ethernet-BP256", ns="asic1") match_engine = MatchEngine() ret = match_engine.fetch(req) assert ret["error"] == "" assert len(ret["keys"]) == 1 - assert "PORT|Ethernet-BP256" in ret["keys"] + assert "PORT|Ethernet-BP256" in ret["keys"] diff --git a/tests/dump_tests/module_tests/__init__.py b/tests/dump_tests/module_tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/dump_tests/module_tests/port_test.py b/tests/dump_tests/module_tests/port_test.py new file mode 100644 index 000000000000..ebed245e31f7 --- /dev/null +++ b/tests/dump_tests/module_tests/port_test.py @@ -0,0 +1,162 @@ +import json +import os +import sys +import jsonpatch +import unittest +import pytest +from deepdiff import DeepDiff +from mock import patch +from dump.helper import create_template_dict, sort_lists +from dump.plugins.port import Port +from dump.match_infra import MatchEngine, ConnectionPool +from swsscommon.swsscommon import SonicV2Connector + +# Location for dedicated db's used for UT +module_tests_path = os.path.dirname(__file__) +dump_tests_path = os.path.join(module_tests_path, "../") +tests_path = os.path.join(dump_tests_path, "../") +dump_test_input = os.path.join(tests_path, "dump_input") +port_files_path = os.path.join(dump_test_input, "port") + +# Define the mock files to read from +dedicated_dbs = {} +dedicated_dbs['CONFIG_DB'] = os.path.join(port_files_path, "config_db.json") +dedicated_dbs['APPL_DB'] = os.path.join(port_files_path, "appl_db.json") +dedicated_dbs['ASIC_DB'] = os.path.join(port_files_path, "asic_db.json") +dedicated_dbs['STATE_DB'] = os.path.join(port_files_path, "state_db.json") + + +def populate_mock(db, db_names): + for db_name in db_names: + db.connect(db_name) + # Delete any default data + db.delete_all_by_pattern(db_name, "*") + with open(dedicated_dbs[db_name]) as f: + mock_json = json.load(f) + for key in mock_json: + for field, value in mock_json[key].items(): + db.set(db_name, key, field, value) + + +@pytest.fixture(scope="class", autouse=True) +def match_engine(): + + print("SETUP") + os.environ["VERBOSE"] = "1" + + # Monkey Patch the SonicV2Connector Object + from ...mock_tables import dbconnector + db = SonicV2Connector() + + # popualate the db with mock data + db_names = list(dedicated_dbs.keys()) + try: + populate_mock(db, db_names) + except Exception as e: + assert False, "Mock initialization failed: " + str(e) + + # Initialize connection pool + conn_pool = ConnectionPool() + DEF_NS = '' # Default Namespace + conn_pool.cache = {DEF_NS: {'conn': db, + 'connected_to': set(db_names)}} + + # Initialize match_engine + match_engine = MatchEngine(conn_pool) + yield match_engine + print("TEARDOWN") + os.environ["VERBOSE"] = "0" + + +@pytest.mark.usefixtures("match_engine") +class TestPortModule: + def test_working_state(self, match_engine): + """ + Scenario: When the config is properly applied and propagated + """ + params = {Port.ARG_NAME: "Ethernet176", "namespace": ""} + m_port = Port(match_engine) + returned = m_port.execute(params) + expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"]) + expect["CONFIG_DB"]["keys"].append("PORT|Ethernet176") + expect["APPL_DB"]["keys"].append("PORT_TABLE:Ethernet176") + expect["STATE_DB"]["keys"].append("PORT_TABLE|Ethernet176") + expect["ASIC_DB"]["keys"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x100000000036a") + expect["ASIC_DB"]["keys"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a4d") + ddiff = DeepDiff(sort_lists(returned), 
sort_lists(expect), ignore_order=True) + assert not ddiff, ddiff + + def test_missing_asic_port(self, match_engine): + """ + Scenario: When the config was applied and just the SAI_OBJECT_TYPE_PORT is missing + """ + params = {Port.ARG_NAME: "Ethernet160", "namespace": ""} + m_port = Port(match_engine) + returned = m_port.execute(params) + expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"]) + expect["CONFIG_DB"]["keys"].append("PORT|Ethernet160") + expect["APPL_DB"]["keys"].append("PORT_TABLE:Ethernet160") + expect["STATE_DB"]["keys"].append("PORT_TABLE|Ethernet160") + expect["ASIC_DB"]["keys"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a49") + expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT") + ddiff = DeepDiff(sort_lists(returned), sort_lists(expect), ignore_order=True) + assert not ddiff, ddiff + + def test_missing_asic_hostif(self, match_engine): + """ + Scenario: When the config was applied and it did not propagate to ASIC DB + """ + params = {Port.ARG_NAME: "Ethernet164", "namespace": ""} + m_port = Port(match_engine) + returned = m_port.execute(params) + expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"]) + expect["CONFIG_DB"]["keys"].append("PORT|Ethernet164") + expect["APPL_DB"]["keys"].append("PORT_TABLE:Ethernet164") + expect["STATE_DB"]["keys"].append("PORT_TABLE|Ethernet164") + expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT") + expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF") + ddiff = DeepDiff(returned, expect, ignore_order=True) + assert not ddiff, ddiff + + def test_missing_state_and_appl(self, match_engine): + """ + Scenario: When the config was applied and it did not propagate to other db's + """ + params = {Port.ARG_NAME: "Ethernet156", "namespace": ""} + m_port = Port(match_engine) + returned = m_port.execute(params) + expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"]) + expect["CONFIG_DB"]["keys"].append("PORT|Ethernet156") + expect["APPL_DB"]["tables_not_found"].append("PORT_TABLE") + expect["STATE_DB"]["tables_not_found"].append("PORT_TABLE") + expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT") + expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF") + ddiff = DeepDiff(returned, expect, ignore_order=True) + assert not ddiff, ddiff + + def test_no_port(self, match_engine): + """ + Scenario: When no entry for the port is present in any of the db's + """ + params = {Port.ARG_NAME: "Ethernet152", "namespace": ""} + m_port = Port(match_engine) + returned = m_port.execute(params) + expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"]) + expect["CONFIG_DB"]["tables_not_found"].append("PORT") + expect["APPL_DB"]["tables_not_found"].append("PORT_TABLE") + expect["STATE_DB"]["tables_not_found"].append("PORT_TABLE") + expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT") + expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF") + ddiff = DeepDiff(returned, expect, ignore_order=True) + assert not ddiff, ddiff + + def test_all_args(self, match_engine): + """ + Scenario: Verify Whether the get_all_args method is working as expected + """ + params = {} + m_port = Port(match_engine) + returned = m_port.get_all_args("") + expect = ["Ethernet156", "Ethernet160", "Ethernet164", "Ethernet176"] + ddiff = DeepDiff(expect, returned, ignore_order=True) + 
assert not ddiff, ddiff
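
Two illustrative sketches follow; they are notes for reviewers, not part of the patch. First, a minimal usage sketch of the matching layer added here: it mirrors the `test_field_value_match_list_type` case above, and the shape of `ret` follows `__create_template()` in `dump/match_infra.py`. The key named in the final comment assumes the standard mock tables these tests run against.

```python
from dump.match_infra import MatchEngine, MatchRequest

# Instantiate once for the whole execution: the engine's ConnectionPool
# caches one SonicV2Connector per namespace and reuses it across requests.
engine = MatchEngine()

# Match every APPL_DB PORT_TABLE entry whose comma-separated "lanes"
# field contains "202". With match_entire_list=False (the default),
# the stored value is split on "," before individual items are compared.
req = MatchRequest(db="APPL_DB", table="PORT_TABLE", key_pattern="*",
                   field="lanes", value="202")
ret = engine.fetch(req)

# ret is shaped like {"error": "", "keys": [...], "return_values": {}};
# with the mock data assumed above, ret["keys"] would contain
# "PORT_TABLE:Ethernet200".
```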
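Second, since the commit message defers to the HLD for adding new modules, here is a rough sketch of what another plugin could look like under the conventions this patch establishes. The `Vlan` module and the CONFIG_DB `VLAN` table layout are assumptions for illustration; `Executor`, `MatchRequest`, and `create_template_dict` are the pieces added or used by this patch.

```python
# Hypothetical file: dump/plugins/vlan.py (illustration only)
from dump.match_infra import MatchRequest
from dump.helper import create_template_dict
from .executor import Executor


class Vlan(Executor):
    """
    Debug Dump Plugin for the VLAN Module (sketch)
    """
    ARG_NAME = "vlan_name"

    def get_all_args(self, ns=""):
        # Enumerate the valid arguments, mirroring Port.get_all_args()
        req = MatchRequest(db="CONFIG_DB", table="VLAN", key_pattern="*", ns=ns)
        ret = self.match_engine.fetch(req)
        return [key.split("|")[-1] for key in ret["keys"]]

    def execute(self, params):
        ret_temp = create_template_dict(dbs=["CONFIG_DB"])
        req = MatchRequest(db="CONFIG_DB", table="VLAN",
                           key_pattern=params[Vlan.ARG_NAME],
                           ns=params["namespace"])
        ret = self.match_engine.fetch(req)
        if not ret["error"] and ret["keys"]:
            ret_temp[req.db]["keys"].extend(ret["keys"])
        else:
            ret_temp[req.db]["tables_not_found"].append(req.table)
        return ret_temp
```

Dropping such a file into `dump/plugins/` is all the registration needed: the package's `__init__.py` imports every module it finds and keys each `Executor` subclass into `dump_modules` by its lower-cased class name, so this sketch would surface as `dump_modules["vlan"]`.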