Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[debug dump util] Module implementation Logic and Port Module #1667

Merged
merged 27 commits into from
Aug 30, 2021
Merged
Show file tree
Hide file tree
Changes from 21 commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
cb06a3b
Port Module and UT Added
vivekrnv May 14, 2021
d236a7d
Multi-Asic Changes Added
vivekrnv May 20, 2021
f5ffd27
Added TODO to mock
vivekrnv May 24, 2021
cef2433
Merge branch 'master' of https://github.com/vivekreddynv/sonic-utilit…
vivekrnv May 24, 2021
bfe107e
redis_match name updated
vivekrnv May 27, 2021
e6607da
copp_cfg is not required in this PR
vivekrnv May 27, 2021
ee22710
Final Changes before review made
vivekrnv May 29, 2021
6d094f4
Comments Addressed
vivekrnv Jun 9, 2021
ffe6158
Minor changes addressed
vivekrnv Jun 9, 2021
b750eba
Comments addressed
vivekrnv Jun 9, 2021
b92290e
Changes made
vivekrnv Jun 10, 2021
6d76514
Moved mock files to diff dir
vivekrnv Jun 10, 2021
244ff65
LGTM issue addressed
vivekrnv Jun 13, 2021
483cc52
Multi-Asic Related Changes made
vivekrnv Jun 23, 2021
826ded0
Merge branch 'master' of https://github.com/Azure/sonic-utilities int…
vivekrnv Jun 23, 2021
0d42722
Merge branch 'master' of https://github.com/Azure/sonic-utilities int…
vivekrnv Jul 14, 2021
63f9aa4
Minor Fixes
vivekrnv Jul 14, 2021
06b8297
Merge branch 'master' of https://github.com/Azure/sonic-utilities int…
vivekrnv Jul 16, 2021
b8d2421
Merge branch 'master' of https://github.com/Azure/sonic-utilities int…
vivekrnv Jul 27, 2021
94eedcc
Refactored and Added Comments
vivekrnv Jul 27, 2021
0db4c12
Refactored the port_test
vivekrnv Jul 29, 2021
8dac306
pep-8 issues handled
vivekrnv Aug 14, 2021
dc43339
Remaining pep8 fixes
vivekrnv Aug 17, 2021
1eb3be8
pep-8 fixes and introduced connection pool optimization
vivekrnv Aug 19, 2021
663f46b
Minor changes owing to connection pool addition
vivekrnv Aug 19, 2021
0082da2
Removed the extra mock and added a match_engine fixture
vivekrnv Aug 19, 2021
2ea3725
LGTM issues handled
vivekrnv Aug 19, 2021
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions dump/plugins/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import os
import sys
import pkgutil
import importlib

from .executor import Executor

pkg_dir = os.path.dirname(__file__)

# Import every sibling module in this package so that each plugin class
# (a subclass of Executor) gets defined and can be discovered below.
for (module_loader, name, ispkg) in pkgutil.iter_modules([pkg_dir]):
    importlib.import_module('.' + name, __package__)

# Registry of all available dump plugins, keyed by lower-cased class name.
# Classes inheriting Executor are the concrete dump plugins.
dump_modules = {cls.__name__.lower(): cls for cls in Executor.__subclasses__()}
16 changes: 16 additions & 0 deletions dump/plugins/executor.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
from abc import ABC, abstractmethod

class Executor(ABC):
    """
    Abstract base class for debug-dump plugins.

    Extend this class (and override both abstract methods) to have the
    module picked up and exposed by the dump state CLI.
    """

    CONFIG_FILE = ""   # Path to config file, if any
    ARG_NAME = "id"    # Arg Identifier

    @abstractmethod
    def execute(self, params):
        """Collect and return the dump for a single object identified by params."""
        ...

    @abstractmethod
    def get_all_args(self, ns):
        """Return every valid value for ARG_NAME within the given namespace."""
        ...

77 changes: 77 additions & 0 deletions dump/plugins/port.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
from .executor import Executor
from dump.match_infra import MatchEngine, MatchRequest
from dump.helper import create_template_dict

class Port(Executor):
    """
    Debug Dump Plugin for PORT Module
    """
    ARG_NAME = "port_name"

    def __init__(self):
        self.match_engine = MatchEngine()
        self.ret_temp = {}
        self.ns = ''

    def get_all_args(self, ns=""):
        """Return the names of all ports found in CONFIG_DB for namespace ns."""
        req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern="*", ns=ns)
        fetched = self.match_engine.fetch(req)
        # CONFIG_DB keys look like "PORT|EthernetN"; keep only the port name.
        return [key.split("|")[-1] for key in fetched["keys"]]

    def execute(self, params):
        """Gather all redis state related to one port across the four db's."""
        self.ret_temp = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
        self.ns = params["namespace"]
        port_name = params[Port.ARG_NAME]
        self.init_port_config_info(port_name)
        self.init_port_appl_info(port_name)
        # The ASIC port object id is learnt from the HOSTIF entry and then
        # used to locate the corresponding SAI port object.
        self.init_asic_port_info(self.init_asic_hostif_info(port_name))
        self.init_state_port_info(port_name)
        return self.ret_temp

    def add_to_ret_template(self, table, db, keys, err):
        """Record fetched keys (or a miss) for db/table; True iff keys found."""
        if err or not keys:
            self.ret_temp[db]["tables_not_found"].extend([table])
            return False
        self.ret_temp[db]["keys"].extend(keys)
        return True

    def init_port_config_info(self, port_name):
        """Fetch the port's CONFIG_DB PORT entry."""
        req = MatchRequest(db="CONFIG_DB", table="PORT", key_pattern=port_name, ns=self.ns)
        fetched = self.match_engine.fetch(req)
        self.add_to_ret_template(req.table, req.db, fetched["keys"], fetched["error"])

    def init_port_appl_info(self, port_name):
        """Fetch the port's APPL_DB PORT_TABLE entry."""
        req = MatchRequest(db="APPL_DB", table="PORT_TABLE", key_pattern=port_name, ns=self.ns)
        fetched = self.match_engine.fetch(req)
        self.add_to_ret_template(req.table, req.db, fetched["keys"], fetched["error"])

    def init_state_port_info(self, port_name):
        """Fetch the port's STATE_DB PORT_TABLE entry."""
        req = MatchRequest(db="STATE_DB", table="PORT_TABLE", key_pattern=port_name, ns=self.ns)
        fetched = self.match_engine.fetch(req)
        self.add_to_ret_template(req.table, req.db, fetched["keys"], fetched["error"])

    def init_asic_hostif_info(self, port_name):
        """
        Locate the ASIC_DB HOSTIF entry whose SAI_HOSTIF_ATTR_NAME matches the
        port and return its SAI_HOSTIF_ATTR_OBJ_ID ("" when not resolvable).
        """
        req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", key_pattern="*",
                           field="SAI_HOSTIF_ATTR_NAME", value=port_name,
                           return_fields=["SAI_HOSTIF_ATTR_OBJ_ID"], ns=self.ns)
        fetched = self.match_engine.fetch(req)
        if fetched["error"] or len(fetched["keys"]) == 0:
            self.ret_temp[req.db]["tables_not_found"] = [req.table]
            return ""
        self.ret_temp[req.db]["keys"] = fetched["keys"]
        hostif_key = fetched["keys"][-1]
        ret_values = fetched["return_values"]
        if hostif_key in ret_values and "SAI_HOSTIF_ATTR_OBJ_ID" in ret_values[hostif_key]:
            return ret_values[hostif_key]["SAI_HOSTIF_ATTR_OBJ_ID"]
        return ""

    def init_asic_port_info(self, asic_port_obj_id):
        """Fetch the SAI port object in ASIC_DB given its object id."""
        if not asic_port_obj_id:
            self.ret_temp["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_PORT")
            return
        req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_PORT",
                           key_pattern=asic_port_obj_id, ns=self.ns)
        fetched = self.match_engine.fetch(req)
        self.add_to_ret_template(req.table, req.db, fetched["keys"], fetched["error"])
35 changes: 35 additions & 0 deletions tests/dump_input/port/appl_db.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
{
SuvarnaMeenakshi marked this conversation as resolved.
Show resolved Hide resolved
"PORT_TABLE:Ethernet176": {
"index": "0",
"lanes": "0",
"alias": "etp45",
"speed": "25000",
"oper_status": "up",
"pfc_asym": "off",
"mtu": "9100",
vivekrnv marked this conversation as resolved.
Show resolved Hide resolved
"fec": "rs",
"admin_status": "up"
},
"PORT_TABLE:Ethernet160": {
"index": "0",
"lanes": "0",
"alias": "etp41",
"speed": "25000",
"oper_status": "up",
"pfc_asym": "off",
"mtu": "9100",
"fec": "rs",
"admin_status": "up"
},
"PORT_TABLE:Ethernet164": {
"index": "0",
"lanes": "0",
"alias": "etp42",
"speed": "25000",
"oper_status": "up",
"pfc_asym": "off",
"mtu": "9100",
"fec": "rs",
"admin_status": "up"
}
}
29 changes: 29 additions & 0 deletions tests/dump_input/port/asic_db.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
{
vivekrnv marked this conversation as resolved.
Show resolved Hide resolved
"ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a4d":{
"SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV",
"SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x100000000036a",
"SAI_HOSTIF_ATTR_NAME" : "Ethernet176",
"SAI_HOSTIF_ATTR_OPER_STATUS" : "true"
},
"ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x100000000036a": {
"SAI_PORT_ATTR_ADMIN_STATE" : "true",
"SAI_PORT_ATTR_SPEED" : "25000",
"SAI_PORT_ATTR_MTU" : "9122"
},
"ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a49":{
"SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV",
"SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000002e6",
"SAI_HOSTIF_ATTR_NAME" : "Ethernet160",
"SAI_HOSTIF_ATTR_OPER_STATUS" : "true"
},
"ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000a4a":{
"SAI_HOSTIF_ATTR_TYPE" : "SAI_HOSTIF_TYPE_NETDEV",
"SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x1000000000307",
"SAI_HOSTIF_ATTR_OPER_STATUS" : "true"
},
"ASIC_STATE:SAI_OBJECT_TYPE_PORT:oid:0x1000000000307": {
"SAI_PORT_ATTR_ADMIN_STATE" : "true",
"SAI_PORT_ATTR_SPEED" : "25000",
"SAI_PORT_ATTR_MTU" : "9122"
}
}
30 changes: 30 additions & 0 deletions tests/dump_input/port/config_db.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
{
"PORT|Ethernet176": {
vivekrnv marked this conversation as resolved.
Show resolved Hide resolved
"admin_status" : "up",
"alias": "etp45",
"index": "45",
"lanes": "176",
"speed": "25000"
},
"PORT|Ethernet164": {
"admin_status" : "up",
"alias": "etp42",
"index": "42",
"lanes": "164",
"speed": "25000"
},
"PORT|Ethernet160": {
"admin_status" : "up",
"alias": "etp41",
"index": "41",
"lanes": "160",
"speed": "25000"
},
"PORT|Ethernet156": {
"admin_status" : "up",
"alias": "etp40",
"index": "40",
"lanes": "156",
"speed": "25000"
}
}
14 changes: 14 additions & 0 deletions tests/dump_input/port/state_db.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
{
"PORT_TABLE|Ethernet176":{
vivekrnv marked this conversation as resolved.
Show resolved Hide resolved
"state" : "ok",
"netdev_oper_status" : "up"
},
"PORT_TABLE|Ethernet160":{
"state" : "ok",
"netdev_oper_status" : "up"
},
"PORT_TABLE|Ethernet164":{
"state" : "ok",
"netdev_oper_status" : "up"
}
}
Empty file.
69 changes: 69 additions & 0 deletions tests/dump_tests/module_tests/mock_sonicv2connector.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
import json, re, os
from utilities_common.constants import DEFAULT_NAMESPACE

class MockSonicV2Connector():
    """
    Minimal stand-in for swsscommon's SonicV2Connector used by the dump
    module unit tests.  Instead of talking to redis, each logical db is
    backed by a json file supplied through ``dedicated_dbs``
    (a mapping of db name -> path to json file).
    """

    def __init__(self, dedicated_dbs, **kwargs):
        # Multi-asic is not modelled by this mock; only the default namespace
        # is accepted.  (The original raised a plain string here, which is a
        # TypeError in Python 3 - exceptions must derive from BaseException.)
        if "namespace" in kwargs and kwargs["namespace"] != DEFAULT_NAMESPACE:
            raise ValueError("This Mock doesn't support multi-asic configuration")
        self.db = None                   # parsed json content of the connected db
        self.db_name_connect_to = None   # name of the db currently "connected"
        self.dedicated_dbs = dedicated_dbs
        db_config_path = os.path.join(os.path.dirname(__file__), "../../mock_tables/database_config.json")
        with open(db_config_path) as f:
            self.db_cfg = json.load(f)

    def connect(self, db_name, retry=False):
        """Load the json file backing ``db_name`` into memory."""
        if db_name not in self.dedicated_dbs:
            raise Exception("{} not found. Available db's: {}".format(db_name, self.dedicated_dbs.keys()))
        try:
            with open(self.dedicated_dbs[db_name]) as f:
                self.db = json.load(f)
            self.db_name_connect_to = db_name
        except BaseException as e:
            raise RuntimeError("Connection Failure" + str(e))

    def get_db_separator(self, db_name):
        """Return the key separator configured for ``db_name``."""
        return self.db_cfg["DATABASES"][db_name]["separator"]

    def _check_connected(self, db_name):
        """Raise unless connect() has been called for ``db_name``."""
        if not self.db:
            raise RuntimeError("MockDB Not Connected")
        if self.db_name_connect_to != db_name:
            raise RuntimeError("Failed to find {} in the MockDB".format(db_name))

    def keys(self, db_name, pattern):
        """Return all keys matching the redis-style glob ``pattern``."""
        self._check_connected(db_name)
        # Translate the redis glob into a regex: escape everything, then
        # turn the escaped '*' back into '.*'.
        regex = re.escape(pattern).replace("\\*", ".*")
        return [key for key in self.db.keys() if re.match(regex, key)]

    def get_all(self, db_name, key):
        """Return the full hash stored at ``key`` ({} when absent)."""
        self._check_connected(db_name)
        if key not in self.db:
            return {}
        return self.db[key]

    def get(self, db_name, key, field):
        """Return one field of the hash at ``key`` ("" when absent)."""
        self._check_connected(db_name)
        if key not in self.db or field not in self.db[key]:
            return ""
        return self.db[key][field]

    def hexists(self, db_name, key, field):
        """Return True iff ``field`` exists in the hash at ``key``."""
        self._check_connected(db_name)
        return key in self.db and field in self.db[key]
Loading