Revert "Allow changing logfile size and logfile backup count" #55

**Merged** · 1 commit · Jul 19, 2022
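This PR reverts the sensor-log tuning change: `log_file_controller.py` returns to its previous defaults (1048576 bytes, 3 backups, no `int()` casts on the config values), `get_data` loses its row-decimation pass, `get_data2` goes back to reading a single log file instead of concatenating rotated backups, and `ConfigUpdate` no longer seeds the `SENSOR_LOG_MAX_BYTES` / `SENSOR_LOG_BACKUP_COUNT` settings.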
**cbpi/controller/log_file_controller.py** (6 additions, 20 deletions)
```diff
@@ -31,8 +31,8 @@ def log_data(self, name: str, value: str) -> None:
         self.influxdb = self.cbpi.config.get("INFLUXDB", "No")
         if self.logfiles == "Yes":
             if name not in self.datalogger:
-                max_bytes = int(self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 131072))
-                backup_count = int(self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3))
+                max_bytes = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", 1048576)
+                backup_count = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", 3)

                 data_logger = logging.getLogger('cbpi.sensor.%s' % name)
                 data_logger.propagate = False
@@ -42,7 +42,7 @@ def log_data(self, name: str, value: str) -> None:
             self.datalogger[name] = data_logger

         formatted_time = strftime("%Y-%m-%d %H:%M:%S", localtime())
-        self.datalogger[name].info("%s,%s" % (formatted_time, str(value)))
+        self.datalogger[name].info("%s,%s" % (formatted_time, value))
         if self.influxdb == "Yes":
             self.influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", "No")
             self.influxdbaddr = self.cbpi.config.get("INFLUXDBADDR", None)
```
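The context collapsed between these two hunks is where `max_bytes` and `backup_count` are consumed. A minimal sketch of that wiring, assuming the standard-library `RotatingFileHandler` that size-based rotation implies (the helper name and exact handler setup are illustrative, not shown in this diff):

```python
import logging
from logging.handlers import RotatingFileHandler

def make_sensor_logger(name: str, max_bytes: int = 1048576, backup_count: int = 3) -> logging.Logger:
    # One logger per sensor; the handler rotates the CSV log once it
    # reaches max_bytes and keeps at most backup_count old files.
    data_logger = logging.getLogger('cbpi.sensor.%s' % name)
    data_logger.propagate = False
    handler = RotatingFileHandler('./logs/sensor_%s.log' % name,
                                  maxBytes=max_bytes, backupCount=backup_count)
    data_logger.addHandler(handler)
    return data_logger
```

Note that the revert also drops the `int()` casts, so the values read from config are handed to the handler as-is.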
```diff
@@ -116,6 +116,7 @@ def datetime_to_str(o):
         for name in names:
             # get all log names
             all_filenames = glob.glob('./logs/sensor_%s.log*' % name)
+
             # concat all logs
             df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', name], header=None) for f in all_filenames])
             logging.info("Read all files for {}".format(names))
```
```diff
@@ -124,29 +125,19 @@ def datetime_to_str(o):
             df = df[name].resample(sample_rate).max()
             logging.info("Sampled now for {}".format(names))
             df = df.dropna()
-            # take every nth row so that total number of rows does not exceed max_rows * 2
-            max_rows = 500
-            total_rows = df.shape[0]
-            if (total_rows > 0) and (total_rows > max_rows):
-                nth = int(total_rows/max_rows)
-                if nth > 1:
-                    df = df.iloc[::nth]
-
             if result is None:
                 result = df
             else:
                 result = pd.merge(result, df, how='outer', left_index=True, right_index=True)

         data = {"time": df.index.tolist()}

         if len(names) > 1:
             for name in names:
                 data[name] = result[name].interpolate(limit_direction='both', limit=10).tolist()
         else:
             data[name] = result.interpolate().tolist()

         logging.info("Send Log for {}".format(names))

         return data
```
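The lines removed in this hunk were a decimation pass: after resampling, `get_data` kept only every nth row so the payload sent to the chart stayed near `max_rows` points. Restated as a standalone helper for clarity (a sketch of the reverted logic, not code that remains in the repository):

```python
import pandas as pd

def decimate(df: pd.DataFrame, max_rows: int = 500) -> pd.DataFrame:
    # Take every nth row so the total number of rows does not
    # exceed roughly max_rows * 2.
    total_rows = df.shape[0]
    if (total_rows > 0) and (total_rows > max_rows):
        nth = int(total_rows / max_rows)
        if nth > 1:
            return df.iloc[::nth]
    return df
```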

```diff
@@ -155,12 +146,7 @@ def dateparse(time_in_secs):
     async def get_data2(self, ids) -> dict:

         result = dict()
         for id in ids:
-            # df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None)
-            # concat all logs
-            all_filenames = glob.glob('./logs/sensor_%s.log*' % id)
-            df = pd.concat([pd.read_csv(f, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime', 'Values'], header=None) for f in all_filenames])
-            df = df.resample('60s').max()
-            df = df.dropna()
+            df = pd.read_csv("./logs/sensor_%s.log" % id, parse_dates=True, date_parser=dateparse, index_col='DateTime', names=['DateTime',"Values"], header=None)
             result[id] = {"time": df.index.astype(str).tolist(), "value":df.Values.tolist()}
         return result
```
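Net effect of this hunk: after the revert, `get_data2` reads only the live `./logs/sensor_<id>.log` and no longer concatenates rotated backups (`sensor_<id>.log.1`, `.log.2`, …), nor resamples the result to 60-second maxima.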

**cbpi/extension/ConfigUpdate/__init__.py** (2 additions, 20 deletions)
```diff
@@ -47,9 +47,8 @@ async def run(self):
         influxdbcloud = self.cbpi.config.get("INFLUXDBCLOUD", None)
         mqttupdate = self.cbpi.config.get("MQTTUpdate", None)
         PRESSURE_UNIT = self.cbpi.config.get("PRESSURE_UNIT", None)
-        SENSOR_LOG_BACKUP_COUNT = self.cbpi.config.get("SENSOR_LOG_BACKUP_COUNT", None)
-        SENSOR_LOG_MAX_BYTES = self.cbpi.config.get("SENSOR_LOG_MAX_BYTES", None)

         if boil_temp is None:
             logger.info("INIT Boil Temp Setting")
             try:
```
```diff
@@ -286,23 +285,6 @@ async def run(self):
                                                 {"label": "PSI", "value": "PSI"}])
             except:
                 logger.warning('Unable to update config')
-
-        # check if SENSOR_LOG_BACKUP_COUNT exists in config
-        if SENSOR_LOG_BACKUP_COUNT is None:
-            logger.info("INIT SENSOR_LOG_BACKUP_COUNT")
-            try:
-                await self.cbpi.config.add("SENSOR_LOG_BACKUP_COUNT", 3, ConfigType.NUMBER, "Max. number of backup logs")
-            except:
-                logger.warning('Unable to update database')
-
-        # check if SENSOR_LOG_MAX_BYTES exists in config
-        if SENSOR_LOG_MAX_BYTES is None:
-            logger.info("INIT SENSOR_LOG_MAX_BYTES")
-            try:
-                await self.cbpi.config.add("SENSOR_LOG_MAX_BYTES", 100000, ConfigType.NUMBER, "Max. number of bytes in sensor logs")
-            except:
-                logger.warning('Unable to update database')
-

 def setup(cbpi):
     cbpi.plugin.register("ConfigUpdate", ConfigUpdate)
```
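The removed blocks follow cbpi4's config-seeding idiom visible throughout this file: read the setting, and if it is absent, add it with a default. Condensed into one helper (a sketch only; the helper name is invented, and the `ConfigType` import path is an assumption based on how cbpi4 extensions normally import it):

```python
import logging
from cbpi.api.config import ConfigType  # import path assumed

logger = logging.getLogger(__name__)

async def ensure_number_setting(cbpi, key: str, default, description: str) -> None:
    # Seed a numeric config entry only if it does not exist yet,
    # mirroring the per-setting blocks ConfigUpdate runs at startup.
    if cbpi.config.get(key, None) is None:
        logger.info("INIT %s" % key)
        try:
            await cbpi.config.add(key, default, ConfigType.NUMBER, description)
        except Exception:
            logger.warning('Unable to update database')
```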