Fix logging and tests (#475)
* fix api output fixture and tests

* return -> yield in all fixtures (see the fixture sketch after this list)

* fix scenario.run() tests

* add logging class and implement in codebase

* bugfix logger contextmanager

* improve test logging

* fix bug in create_roads
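
For reference, the return -> yield change in pytest fixtures typically looks like the sketch below (hypothetical fixture and helpers, not taken from this diff); yielding instead of returning lets teardown code run after each test.

    import pytest

    @pytest.fixture
    def database():
        db = open_test_database()  # hypothetical setup helper
        yield db                   # was: return db
        db.cleanup()               # teardown now runs after the test finishes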
LuukBlom authored Jun 28, 2024
1 parent 7683303 commit db77782
Showing 19 changed files with 509 additions and 491 deletions.
5 changes: 5 additions & 0 deletions flood_adapt/__init__.py
@@ -1 +1,6 @@
from flood_adapt.log import FloodAdaptLogging

FloodAdaptLogging() # Initialize logging once for the entire package


__version__ = "0.1.0"
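
With logging configured once at import time, any module in the package can request a preconfigured child logger (this is the pattern the diffs below introduce):

    from flood_adapt.log import FloodAdaptLogging

    logger = FloodAdaptLogging.getLogger(__name__)  # child of the "FloodAdapt" root logger
    logger.info("message")  # propagates to the root logger's console/file handlers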
7 changes: 4 additions & 3 deletions flood_adapt/dbs_controller.py
@@ -1,4 +1,3 @@
import logging
import os
import shutil
from datetime import datetime
@@ -22,6 +21,7 @@
from flood_adapt.dbs_classes.dbs_static import DbsStatic
from flood_adapt.dbs_classes.dbs_strategy import DbsStrategy
from flood_adapt.integrator.sfincs_adapter import SfincsAdapter
from flood_adapt.log import FloodAdaptLogging
from flood_adapt.object_model.hazard.event.event_factory import EventFactory
from flood_adapt.object_model.hazard.event.synthetic import Synthetic
from flood_adapt.object_model.interface.benefits import IBenefit
@@ -99,7 +99,8 @@ def __init__(
return # Skip re-initialization

# If the database is not initialized, or a new path or name is provided, (re-)initialize
logging.info(
self._logger = FloodAdaptLogging.getLogger(__name__)
self._logger.info(
f"(Re-)Initializing database to {database_name} at {database_path}"
)
self.database_path = database_path
@@ -922,7 +923,7 @@ def has_run_hazard(self, scenario_name: str) -> None:
dirs_exist_ok=True,
ignore=shutil.ignore_patterns("simulations"),
)
print(
self._logger.info(
f"Hazard simulation is used from the '{scn.attrs.name}' scenario"
)

11 changes: 5 additions & 6 deletions flood_adapt/integrator/fiat_adapter.py
@@ -2,9 +2,9 @@
from pathlib import Path
from typing import List, Optional, Union

from hydromt.log import setuplog
from hydromt_fiat.fiat import FiatModel

from flood_adapt.log import FloodAdaptLogging
from flood_adapt.object_model.direct_impact.measure.buyout import Buyout
from flood_adapt.object_model.direct_impact.measure.elevate import Elevate
from flood_adapt.object_model.direct_impact.measure.floodproof import FloodProof
@@ -25,8 +25,8 @@ class FiatAdapter:
def __init__(self, model_root: str, database_path: str) -> None:
"""Load FIAT model based on a root directory."""
# Load FIAT template
self.fiat_logger = setuplog("hydromt_fiat", log_level=10)
self.fiat_model = FiatModel(root=model_root, mode="r", logger=self.fiat_logger)
self._logger = FloodAdaptLogging.getLogger(__name__)
self.fiat_model = FiatModel(root=model_root, mode="r", logger=self._logger)
self.fiat_model.read()

# Get site information
@@ -52,10 +52,9 @@ def __init__(self, model_root: str, database_path: str) -> None:
self.bfe["name"] = self.site.attrs.fiat.bfe.field_name

def __del__(self) -> None:
# Close fiat_logger
for handler in self.fiat_logger.handlers:
for handler in self._logger.handlers:
handler.close()
self.fiat_logger.handlers.clear()
self._logger.handlers.clear()
# Use garbage collector to ensure file handlers are properly cleaned up
gc.collect()

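Passing the package logger into the hydromt-fiat model, as the diff above does, routes the model's messages through the "FloodAdapt" logger hierarchy. A minimal sketch of the pattern (model path is illustrative):

    from flood_adapt.log import FloodAdaptLogging
    from hydromt_fiat.fiat import FiatModel

    logger = FloodAdaptLogging.getLogger("fiat_adapter")
    model = FiatModel(root="path/to/fiat_model", mode="r", logger=logger)
    # Whatever the model logs now reaches the handlers configured on the
    # "FloodAdapt" root logger.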
26 changes: 10 additions & 16 deletions flood_adapt/integrator/sfincs_adapter.py
@@ -1,5 +1,4 @@
import gc
import logging
import os
from pathlib import Path
from typing import Optional, Union
@@ -14,6 +13,7 @@
from hydromt_sfincs import SfincsModel
from hydromt_sfincs.quadtree import QuadtreeGrid

from flood_adapt.log import FloodAdaptLogging
from flood_adapt.object_model.hazard.event.event import EventModel
from flood_adapt.object_model.hazard.event.historical_hurricane import (
HistoricalHurricane,
@@ -32,31 +32,25 @@
)
from flood_adapt.object_model.site import Site

# from flood_adapt.object_model.validate.config import validate_existence_root_folder

logger = logging.getLogger(__name__)


class SfincsAdapter:

def __init__(self, site: Site, model_root: Optional[str] = None):
"""Load overland sfincs model based on a root directory.
Args:
model_root (str, optional): Root directory of overland sfincs model. Defaults to None.
"""
self.sfincs_logger = logging.getLogger(__name__)
self.sfincs_logger.handlers = [] # To ensure logging file path has reset
self.sf_model = SfincsModel(
root=model_root, mode="r+", logger=self.sfincs_logger
)
self._logger = FloodAdaptLogging.getLogger(__name__)
self.sf_model = SfincsModel(root=model_root, mode="r+", logger=self._logger)
self.sf_model.read()
self.site = site

def __del__(self):
# Close the log file associated with the logger
for handler in self.sfincs_logger.handlers:
for handler in self._logger.handlers:
handler.close()
self.sfincs_logger.handlers.clear()
self._logger.handlers.clear()
# Use garbage collector to ensure file handles are properly cleaned up
gc.collect()

@@ -242,7 +236,7 @@ def add_dis_bc(self, list_df: pd.DataFrame, site_river: list):
gdf_locs.crs = self.sf_model.crs

if len(list_df.columns) != len(gdf_locs):
logging.error(
self._logger.error(
"""The number of rivers of the site.toml does not match the
number of rivers in the SFINCS model. Please check the number
of coordinates in the SFINCS *.src file."""
@@ -257,7 +251,7 @@ def add_dis_bc(self, list_df: pd.DataFrame, site_river: list):
np.abs(gdf_locs.geometry[ii + 1].x - river.x_coordinate) < 5
and np.abs(gdf_locs.geometry[ii + 1].y - river.y_coordinate) < 5
):
logging.error(
self._logger.error(
"""The location and/or order of rivers in the site.toml does not match the
locations and/or order of rivers in the SFINCS model. Please check the
coordinates and their order in the SFINCS *.src file and ensure they are
@@ -301,9 +295,9 @@ def add_floodwall(self, floodwall: FloodWallModel, measure_path=Path):
for height in gdf_floodwall["z"]
]
gdf_floodwall["z"] = heights
logging.info("Using floodwall height from shape file.")
self._logger.info("Using floodwall height from shape file.")
except Exception:
logging.warning(
self._logger.warning(
f"""Could not use height data from file due to missing ""z""-column or missing values therein.\n
Using uniform height of {floodwall.elevation.convert(UnitTypesLength("meters"))} meters instead."""
)
105 changes: 105 additions & 0 deletions flood_adapt/log.py
@@ -0,0 +1,105 @@
import logging
import os
from contextlib import contextmanager


class FloodAdaptLogging:
_DEFAULT_FORMATTER = logging.Formatter(
fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
datefmt="%Y-%m-%d %I:%M:%S %p",
)
_root_logger = logging.getLogger("FloodAdapt")

def __init__(
self,
file_path: str = None,
loglevel_console: int = logging.WARNING,
loglevel_root: int = logging.INFO,
loglevel_files: int = logging.DEBUG,
formatter: logging.Formatter = _DEFAULT_FORMATTER,
) -> None:
"""Initialize the logging system for the FloodAdapt."""
self._formatter = formatter

self._root_logger.setLevel(loglevel_root)
if self._root_logger.hasHandlers():
self._root_logger.handlers.clear()

# Add file handler if provided
if file_path is not None:
self.add_file_handler(file_path, loglevel_files, formatter)

# Add console handler
console_handler = logging.StreamHandler()
console_handler.setLevel(loglevel_console)
console_handler.setFormatter(formatter)
self._root_logger.addHandler(console_handler)

@classmethod
def add_file_handler(
cls,
file_path: str,
loglevel: int = logging.DEBUG,
formatter: logging.Formatter = None,
) -> None:
"""Add a file handler to the logger that directs outputs to a the file."""
if not os.path.exists(file_path):
os.makedirs(os.path.dirname(file_path), exist_ok=True)

file_handler = logging.FileHandler(filename=file_path, mode="a")
file_handler.setLevel(loglevel)

formatter = formatter or cls._DEFAULT_FORMATTER
file_handler.setFormatter(formatter)

cls.getLogger().addHandler(file_handler)

@classmethod
def remove_file_handler(cls, file_path: str) -> None:
"""Remove a file handler from the logger, which stops sending logs to that file and closes it."""
for handler in cls.getLogger().handlers:
if isinstance(
handler, logging.FileHandler
) and handler.baseFilename == os.path.abspath(file_path):
handler.close()
cls.getLogger().removeHandler(handler)

@classmethod
def getLogger(cls, name: str = None) -> logging.Logger:
if name is None:
return cls._root_logger

return logging.getLogger(f"FloodAdapt.{name}")

@classmethod
def shutdown(cls):
root_logger = cls.getLogger()
handlers = root_logger.handlers[:]
for handler in handlers:
if isinstance(handler, logging.FileHandler):
handler.close()
root_logger.removeHandler(handler)
logging.shutdown()

@classmethod
@contextmanager
def to_file(
cls,
*,
file_path: str = None,
loglevel: int = logging.DEBUG,
formatter: logging.Formatter = _DEFAULT_FORMATTER,
):
"""Open a file at filepath to write logs to. Does not affect other loggers.
When the context manager exits (via regular execution or an exception), the file is closed and the handler is removed.
"""
if file_path is None:
raise ValueError(
"file_path must be provided as a key value pair: 'file_path=<file_path>'."
)
cls.add_file_handler(file_path, loglevel, formatter)
try:
yield
finally:
cls.remove_file_handler(file_path)
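
A minimal usage sketch of the new logging class (file name and messages are illustrative):

    from flood_adapt.log import FloodAdaptLogging

    logger = FloodAdaptLogging.getLogger("scenario")

    # Mirror all package logs to a file for the duration of the block; the
    # handler is removed and the file closed on exit, even after an exception.
    with FloodAdaptLogging.to_file(file_path="logs/scenario_run.log"):
        logger.info("Starting scenario run...")

    FloodAdaptLogging.shutdown()  # close any remaining file handlers at exit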
29 changes: 15 additions & 14 deletions flood_adapt/object_model/direct_impacts.py
@@ -1,4 +1,3 @@
import logging
import shutil
import subprocess
import time
@@ -18,6 +17,7 @@

import flood_adapt.config as FloodAdapt_config
from flood_adapt.integrator.fiat_adapter import FiatAdapter
from flood_adapt.log import FloodAdaptLogging
from flood_adapt.object_model.direct_impact.impact_strategy import ImpactStrategy
from flood_adapt.object_model.direct_impact.socio_economic_change import (
SocioEconomicChange,
@@ -41,6 +41,7 @@ class DirectImpacts:
has_run: bool = False

def __init__(self, scenario: ScenarioModel, database, results_path: Path) -> None:
self._logger = FloodAdaptLogging.getLogger(__name__)
self.name = scenario.name
self.database = database
self.scenario = scenario
@@ -121,15 +122,15 @@ def set_hazard(self, scenario: ScenarioModel, database, results_dir: Path) -> None:
self.hazard = Hazard(scenario, database, results_dir)

def preprocess_models(self):
logging.info("Preparing impact models...")
self._logger.info("Preparing impact models...")
# Preprocess all impact model input
start_time = time.time()
self.preprocess_fiat()
end_time = time.time()
print(f"FIAT preprocessing took {str(round(end_time - start_time, 2))} seconds")

def run_models(self):
logging.info("Running impact models...")
self._logger.info("Running impact models...")
start_time = time.time()
return_code = self.run_fiat()
end_time = time.time()
@@ -140,10 +141,10 @@ def run_models(self):
self.__setattr__("has_run", True)

def postprocess_models(self):
logging.info("Post-processing impact models...")
self._logger.info("Post-processing impact models...")
# Preprocess all impact model input
self.postprocess_fiat()
logging.info("Impact models post-processing complete!")
self._logger.info("Impact models post-processing complete!")

def preprocess_fiat(self):
"""Update FIAT model based on scenario information and then runs the FIAT model."""
@@ -308,17 +309,17 @@ def postprocess_fiat(self):
if self.site_info.attrs.fiat.roads_file_name:
self._create_roads(fiat_results_df)

logging.info("Post-processing complete!")
self._logger.info("Post-processing complete!")

# If site config is set to not keep FIAT simulation, then delete folder
if not self.site_info.attrs.fiat.save_simulation:
try:
shutil.rmtree(self.fiat_path)
except OSError as e_info:
logging.warning(f"{e_info}\nCould not delete {self.fiat_path}.")
self._logger.warning(f"{e_info}\nCould not delete {self.fiat_path}.")

def _create_roads(self, fiat_results_df):
logging.info("Saving road impacts...")
self._logger.info("Saving road impacts...")
# Read roads spatial file
roads = gpd.read_file(
self.fiat_path.joinpath("output", self.site_info.attrs.fiat.roads_file_name)
@@ -335,10 +336,10 @@ def _create_roads(self, fiat_results_df):
)
# Save as geopackage
outpath = self.impacts_path.joinpath(f"Impacts_roads_{self.name}.gpkg")
roads.to_file(outpath, format="geopackage")
roads.to_file(outpath, driver="GPKG")

def _create_equity(self, metrics_path):
logging.info("Calculating equity weighted risk...")
self._logger.info("Calculating equity weighted risk...")
# Get metrics tables
metrics_fold = metrics_path.parent
# loop through metrics aggregated files
@@ -402,7 +403,7 @@ def _create_equity(self, metrics_path):
metrics_new.to_csv(file)

def _create_aggregation(self, metrics_path):
logging.info("Saving impacts on aggregation areas...")
self._logger.info("Saving impacts on aggregation areas...")

# Define where aggregated results are saved
output_fold = self.impacts_path
@@ -439,7 +440,7 @@
)

def _create_footprints(self, fiat_results_df):
logging.info("Saving impacts on building footprints...")
self._logger.info("Saving impacts on building footprints...")

# Get footprints file paths from site.toml
# TODO ensure that if this does not happen we get same file name output from FIAT?
@@ -508,7 +509,7 @@ def _add_exeedance_probability(self, fiat_results_path):

def _create_infometrics(self, fiat_results_df) -> Path:
# Get the metrics configuration
logging.info("Calculating infometrics...")
self._logger.info("Calculating infometrics...")

if self.hazard.event_mode == "risk":
ext = "_risk"
@@ -547,7 +548,7 @@ def _create_infometrics(self, fiat_results_df) -> Path:
return metrics_outputs_path

def _create_infographics(self, mode, metrics_path):
logging.info("Creating infographics...")
self._logger.info("Creating infographics...")

# Get the infographic
InforgraphicFactory.create_infographic_file_writer(