[Logrotate] Update log rotate configuration via ConfigDB #61

Open · wants to merge 2 commits into base: master
51 changes: 50 additions & 1 deletion scripts/hostcfgd
@@ -1500,6 +1500,43 @@ class FipsCfg(object):
syslog.syslog(syslog.LOG_INFO, f'FipsCfg: update the FIPS enforce option {self.enforce}.')
loader.set_fips(image, self.enforce)

class LoggingCfg(object):
"""Logging Config Daemon

Handles changes in LOGGING table.
1) Handles changes to the debug/syslog log file configuration
"""
def __init__(self):
self.cache = {}

def load(self, logging_cfg=None):
# Get the initial logging file configuration; avoid a mutable default
# argument since the cache is modified in place later on
self.cache = logging_cfg if logging_cfg is not None else {}
syslog.syslog(syslog.LOG_DEBUG, f'Initial logging config: {self.cache}')

def update_logging_cfg(self, key, data):
"""Apply logging configuration

The daemon restarts logrotate-config which will regenerate logrotate
config files.
Args:
key: DB table key whose change triggered the update (essentially the
name of a config file)
data: File's config data
"""
syslog.syslog(syslog.LOG_DEBUG, 'LoggingCfg: logging files cfg update')
if self.cache.get(key) != data:
syslog.syslog(syslog.LOG_INFO,
f'Set logging file {key} config: {data}')
try:
run_cmd('sudo systemctl restart logrotate-config', True, True)
except Exception:
syslog.syslog(syslog.LOG_ERR, f'Failed to update logging config for {key}')
return

# Update cache
self.cache[key] = data

class HostConfigDaemon:
def __init__(self):
self.state_db_conn = DBConnector(STATE_DB, 0)
@@ -1551,6 +1588,9 @@ class HostConfigDaemon:
# Initialize FipsCfg
self.fipscfg = FipsCfg(self.state_db_conn)

# Initialize LoggingCfg
self.loggingcfg = LoggingCfg()

def load(self, init_data):
aaa = init_data['AAA']
tacacs_global = init_data['TACPLUS']
@@ -1571,6 +1611,7 @@
ntp_global = init_data.get(swsscommon.CFG_NTP_GLOBAL_TABLE_NAME)
ntp_servers = init_data.get(swsscommon.CFG_NTP_SERVER_TABLE_NAME)
ntp_keys = init_data.get(swsscommon.CFG_NTP_KEY_TABLE_NAME)
logging = init_data.get('LOGGING', {})

self.aaacfg.load(aaa, tacacs_global, tacacs_server, radius_global, radius_server)
self.iptables.load(lpbk_table)
@@ -1579,11 +1620,11 @@
self.sshscfg.load(ssh_server)
self.devmetacfg.load(dev_meta)
self.mgmtifacecfg.load(mgmt_ifc, mgmt_vrf)

self.rsyslogcfg.load(syslog_cfg, syslog_srv)
self.dnscfg.load(dns)
self.fipscfg.load(fips_cfg)
self.ntpcfg.load(ntp_global, ntp_servers, ntp_keys)
self.loggingcfg.load(logging)

# Update AAA with the hostname
self.aaacfg.hostname_update(self.devmetacfg.hostname)
@@ -1716,6 +1757,10 @@ class HostConfigDaemon:
data = self.config_db.get_table("FIPS")
self.fipscfg.fips_handler(data)

def logging_handler(self, key, op, data):
syslog.syslog(syslog.LOG_INFO, 'LOGGING table handler...')
self.loggingcfg.update_logging_cfg(key, data)

def wait_till_system_init_done(self):
# No need to print the output in the log file so using the "--quiet"
# flag
@@ -1778,6 +1823,10 @@
self.config_db.subscribe(swsscommon.CFG_NTP_KEY_TABLE_NAME,
make_callback(self.ntp_srv_key_handler))

# Handle LOGGING changes
self.config_db.subscribe(swsscommon.CFG_LOGGING_TABLE_NAME,
make_callback(self.logging_handler))

syslog.syslog(syslog.LOG_INFO,
"Waiting for systemctl to finish initialization")
self.wait_till_system_init_done()
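For reference, a minimal sketch of how a LOGGING table entry could be modified in ConfigDB on a running switch so that the subscription added above fires and logrotate-config is restarted. This is an illustration only, not part of this PR: it assumes the ConfigDBConnector API from swsscommon, and the field values simply mirror the test vectors below.

# Illustrative sketch (not part of this PR): push a LOGGING change into
# ConfigDB; hostcfgd's logging_handler then restarts logrotate-config.
from swsscommon.swsscommon import ConfigDBConnector

config_db = ConfigDBConnector()
config_db.connect()

# Values are strings, following the ConfigDB convention used in the tests.
config_db.mod_entry('LOGGING', 'syslog', {
    'frequency': 'weekly',
    'max_number': '100',
    'size': '20.0',
})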
91 changes: 91 additions & 0 deletions tests/hostcfgd/hostcfgd_logging_test.py
@@ -0,0 +1,91 @@
import importlib.machinery
import importlib.util
import os
import sys

from copy import copy
from swsscommon import swsscommon
from syslog import syslog, LOG_ERR
from tests.hostcfgd.test_logging_vectors \
import HOSTCFGD_TEST_LOGGING_VECTOR as logging_test_data
from tests.common.mock_configdb import MockConfigDb, MockDBConnector
from unittest import TestCase, mock

test_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
modules_path = os.path.dirname(test_path)
scripts_path = os.path.join(modules_path, "scripts")
src_path = os.path.dirname(modules_path)
templates_path = os.path.join(src_path, "sonic-host-services-data/templates")
output_path = os.path.join(test_path, "hostcfgd/output")
sample_output_path = os.path.join(test_path, "hostcfgd/sample_output")
sys.path.insert(0, modules_path)

# Load the file under test
hostcfgd_path = os.path.join(scripts_path, 'hostcfgd')
loader = importlib.machinery.SourceFileLoader('hostcfgd', hostcfgd_path)
spec = importlib.util.spec_from_loader(loader.name, loader)
hostcfgd = importlib.util.module_from_spec(spec)
loader.exec_module(hostcfgd)
sys.modules['hostcfgd'] = hostcfgd

# Mock swsscommon classes
hostcfgd.ConfigDBConnector = MockConfigDb
hostcfgd.DBConnector = MockDBConnector
hostcfgd.Table = mock.Mock()
hostcfgd.run_cmd = mock.Mock()


class TestHostcfgLogging(TestCase):
"""
Test hostcfgd daemon - LogRotate
"""

def __init__(self, *args, **kwargs):
super(TestHostcfgLogging, self).__init__(*args, **kwargs)
self.host_config_daemon = None

def setUp(self):
MockConfigDb.set_config_db(logging_test_data['initial'])
self.host_config_daemon = hostcfgd.HostConfigDaemon()

logging_config = self.host_config_daemon.config_db.get_table(
swsscommon.CFG_LOGGING_TABLE_NAME)

assert self.host_config_daemon.loggingcfg.cache == {}
self.host_config_daemon.loggingcfg.load(logging_config)
assert self.host_config_daemon.loggingcfg.cache != {}

# Reset run_cmd mock
hostcfgd.run_cmd.reset_mock()

def tearDown(self):
self.host_config_daemon = None
MockConfigDb.set_config_db({})

def update_config(self, config_name):
MockConfigDb.mod_config_db(logging_test_data[config_name])

syslog_data = logging_test_data[config_name]['LOGGING']['syslog']
debug_data = logging_test_data[config_name]['LOGGING']['debug']

self.host_config_daemon.logging_handler(key='syslog', op=None,
data=syslog_data)
self.host_config_daemon.logging_handler(key='debug', op=None,
data=debug_data)

def assert_applied(self, config_name):
"""Assert that updated config triggered appropriate services

Args:
config_name: str: Test vectors config name

Asserts:
Fails if the config was not applied
"""
orig_cache = copy(self.host_config_daemon.loggingcfg.cache)
self.update_config(config_name)
assert self.host_config_daemon.loggingcfg.cache != orig_cache
hostcfgd.run_cmd.assert_called()

def test_rsyslog_handle_modified(self):
self.assert_applied('modified')
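To make the caching behaviour the test relies on explicit, here is a small usage sketch (not part of the PR) of LoggingCfg as exercised above; in the unit tests run_cmd is mocked, so no real systemctl call is made.

# Usage sketch: update_logging_cfg only restarts logrotate-config when the
# incoming data differs from the cached entry for that key.
cfg = hostcfgd.LoggingCfg()
cfg.load({'syslog': {'frequency': 'daily', 'max_number': '20'}})

# Identical data: cache hit, no restart is triggered.
cfg.update_logging_cfg('syslog', {'frequency': 'daily', 'max_number': '20'})

# Changed data: logrotate-config is restarted and the cache is refreshed.
cfg.update_logging_cfg('syslog', {'frequency': 'weekly', 'max_number': '100'})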
48 changes: 48 additions & 0 deletions tests/hostcfgd/test_logging_vectors.py
@@ -0,0 +1,48 @@
'''
hostcfgd logging configuration test vectors
'''

HOSTCFGD_TEST_LOGGING_VECTOR = {
'initial': {
'DEVICE_METADATA': {
'localhost': {
'hostname': 'logrotate',
},
},
'LOGGING': {
'syslog': {
'disk_percentage': '',
'frequency': 'daily',
'max_number': '20',
'size': '10.0'
},
'debug': {
'disk_percentage': '',
'frequency': 'daily',
'max_number': '10',
'size': '20.0'
}
},
"SSH_SERVER": {
"POLICIES" :{
"max_sessions": "100"
}
}
},
'modified': {
'LOGGING': {
'syslog': {
'disk_percentage': '',
'frequency': 'weekly',
'max_number': '100',
'size': '20.0'
},
'debug': {
'disk_percentage': '',
'frequency': 'weekly',
'max_number': '20',
'size': '100.0'
}
}
}
}
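For manual checks on a device, the LOGGING portion of a vector can be printed as a config_db.json style fragment and merged into the running configuration. A sketch, assuming the repository's test layout is on the Python path:

# Print the 'modified' LOGGING table as a config_db.json style fragment.
import json

from tests.hostcfgd.test_logging_vectors import \
    HOSTCFGD_TEST_LOGGING_VECTOR as vectors

print(json.dumps({'LOGGING': vectors['modified']['LOGGING']}, indent=4))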