Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Updates in site.toml to clear not-used attributes and update some variables #462

Merged
merged 18 commits into from
Jun 24, 2024
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
f967003
removed unused attributes from site toml
panosatha Jun 11, 2024
398f34a
small update in lint
panosatha Jun 11, 2024
16a4045
made observation points non mandatory
panosatha Jun 11, 2024
e26377c
changed site attribute obs_station to tide_gauge and removed unused a…
panosatha Jun 11, 2024
a73201b
changed the flooding_threshold attribute to belong to a flood_frequen…
panosatha Jun 11, 2024
2ba8301
added default value for flood frequency flooding threshold
panosatha Jun 12, 2024
c2177e5
when no river is present template discharge is used for now
panosatha Jun 12, 2024
17c0a8c
added source option to tide_gauge site attribute
panosatha Jun 12, 2024
8ce89b6
small correction in docstring
panosatha Jun 13, 2024
a3b73ba
changed slr.scenarios in site.toml and made them not mandatory
panosatha Jun 14, 2024
7ba16fe
offshore model and cyclone tracks not mandatory anymore
panosatha Jun 14, 2024
58de58d
Merge branch 'main' of https://github.com/Deltares-research/FloodAdap…
panosatha Jun 17, 2024
1f6f38a
small updates
panosatha Jun 18, 2024
06eee84
Merge branch 'main' of https://github.com/Deltares-research/FloodAdap…
panosatha Jun 19, 2024
8929744
naming change
panosatha Jun 19, 2024
6ff8d94
make sure that the workflow does not break when there is nothing to plot
panosatha Jun 19, 2024
0f0d0be
Merge branch 'main' of https://github.com/Deltares-research/FloodAdap…
panosatha Jun 19, 2024
94f8724
todo on api for aggregation
panosatha Jun 24, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/lint.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ jobs:
pip install ruff
# Include `--format=github` to enable automatic inline annotations.
- name: Run Ruff
run: ruff check .
run: ruff check . --fix
black:
runs-on: ubuntu-latest
steps:
Expand Down
4 changes: 2 additions & 2 deletions flood_adapt/api/events.py
Original file line number Diff line number Diff line change
Expand Up @@ -120,10 +120,10 @@ def copy_event(old_name: str, new_name: str, new_description: str) -> None:


def download_wl_data(
station_id, start_time, end_time, units: UnitTypesLength, file=None
station_id, start_time, end_time, units: UnitTypesLength, source: str, file=None
) -> pd.DataFrame:
return HistoricalNearshore.download_wl_data(
station_id, start_time, end_time, units, file
station_id, start_time, end_time, units, source, file
)


Expand Down
2 changes: 1 addition & 1 deletion flood_adapt/api/projections.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def copy_projection(old_name: str, new_name: str, new_description: str) -> None:


def get_slr_scn_names() -> list:
return Database().get_slr_scn_names()
return Database().static.get_slr_scn_names()


def interp_slr(slr_scenario: str, year: float) -> float:
Expand Down
17 changes: 10 additions & 7 deletions flood_adapt/dbs_classes/dbs_static.py
Original file line number Diff line number Diff line change
Expand Up @@ -86,12 +86,12 @@ def get_model_grid(self) -> QuadtreeGrid:
@cache_method_wrapper
def get_obs_points(self) -> GeoDataFrame:
"""Get the observation points from the flood hazard model."""
names = []
descriptions = []
lat = []
lon = []
if self._database.site.attrs.obs_point is not None:
obs_points = self._database.site.attrs.obs_point
names = []
descriptions = []
lat = []
lon = []
for pt in obs_points:
names.append(pt.name)
descriptions.append(pt.description)
Expand Down Expand Up @@ -135,16 +135,19 @@ def get_static_map(self, path: Union[str, Path]) -> gpd.GeoDataFrame:

@cache_method_wrapper
def get_slr_scn_names(self) -> list:
"""Get the names of the sea level rise scenarios from the slr.csv file.
"""Get the names of the sea level rise scenarios from the file provided.

Returns
-------
list
List of scenario names
"""
input_file = self._database.static_path.joinpath("slr", "slr.csv")
input_file = self._database.static_path.joinpath(
self._database.site.attrs.slr.scenarios.file
)
df = pd.read_csv(input_file)
return df.columns[2:].to_list()
names = df.columns[2:].to_list()
return names

@cache_method_wrapper
def get_green_infra_table(self, measure_type: str) -> pd.DataFrame:
Expand Down
16 changes: 12 additions & 4 deletions flood_adapt/dbs_controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -191,15 +191,17 @@ def interp_slr(self, slr_scenario: str, year: float) -> float:
ValueError
if the year to evaluate is outside of the time range in the slr.csv file
"""
input_file = self.input_path.parent.joinpath("static", "slr", "slr.csv")
input_file = self.input_path.parent.joinpath(
"static", self.site.attrs.slr.scenarios.file
)
df = pd.read_csv(input_file)
if year > df["year"].max() or year < df["year"].min():
raise ValueError(
"The selected year is outside the range of the available SLR scenarios"
)
else:
slr = np.interp(year, df["year"], df[slr_scenario])
ref_year = self.site.attrs.slr.relative_to_year
ref_year = self.site.attrs.slr.scenarios.relative_to_year
if ref_year > df["year"].max() or ref_year < df["year"].min():
raise ValueError(
f"The reference year {ref_year} is outside the range of the available SLR scenarios"
Expand All @@ -215,7 +217,9 @@ def interp_slr(self, slr_scenario: str, year: float) -> float:

# TODO: should probably be moved to frontend
def plot_slr_scenarios(self) -> str:
input_file = self.input_path.parent.joinpath("static", "slr", "slr.csv")
input_file = self.input_path.parent.joinpath(
"static", self.site.attrs.slr.scenarios.file
)
df = pd.read_csv(input_file)
ncolors = len(df.columns) - 2
try:
Expand All @@ -236,7 +240,7 @@ def plot_slr_scenarios(self) -> str:
) as e:
print(e)

ref_year = self.site.attrs.slr.relative_to_year
ref_year = self.site.attrs.slr.scenarios.relative_to_year
if ref_year > df["Year"].max() or ref_year < df["Year"].min():
raise ValueError(
f"The reference year {ref_year} is outside the range of the available SLR scenarios"
Expand Down Expand Up @@ -465,6 +469,10 @@ def plot_river(
) -> (
str
): # I think we need a separate function for the different timeseries when we also want to plot multiple rivers
if any(df.empty for df in input_river_df) and any(
river["source"] == "timeseries" for river in event["river"]
):
return ""
event["name"] = "temp_event"
temp_event = EventFactory.get_event(event["template"]).load_dict(event)
event_dir = self.events.get_database_path().joinpath(temp_event.attrs.name)
Expand Down
5 changes: 3 additions & 2 deletions flood_adapt/object_model/hazard/event/historical_nearshore.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ def download_wl_data(
start_time_str: str,
stop_time_str: str,
units: UnitTypesLength,
source: str,
file: Union[str, None],
) -> pd.DataFrame:
"""Download waterlevel data from NOAA station using station_id, start and stop time.
Expand Down Expand Up @@ -94,8 +95,8 @@ def download_wl_data(
df = df_temp.iloc[startindex:stopindex, :]
else:
# Get NOAA data
source = cht_station.source("noaa_coops")
df = source.get_data(station_id, start_time, stop_time)
source_obj = cht_station.source(source)
df = source_obj.get_data(station_id, start_time, stop_time)
df = pd.DataFrame(df) # Convert series to dataframe
df = df.rename(columns={"v": 1})
# convert to gui units
Expand Down
61 changes: 38 additions & 23 deletions flood_adapt/object_model/hazard/hazard.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,16 +88,17 @@ def set_simulation_paths(self) -> None:
)
]
# Create a folder name for the offshore model (will not be used if offshore model is not created)
self.simulation_paths_offshore = [
self.database.scenarios.get_database_path(
get_input_path=False
).joinpath(
self.name,
"Flooding",
"simulations",
self.site.attrs.sfincs.offshore_model,
)
]
if self.site.attrs.sfincs.offshore_model is not None:
self.simulation_paths_offshore = [
self.database.scenarios.get_database_path(
get_input_path=False
).joinpath(
self.name,
"Flooding",
"simulations",
self.site.attrs.sfincs.offshore_model,
)
]
elif self._mode == Mode.risk: # risk mode requires an additional folder layer
self.simulation_paths = []
self.simulation_paths_offshore = []
Expand All @@ -114,17 +115,18 @@ def set_simulation_paths(self) -> None:
)
)
# Create a folder name for the offshore model (will not be used if offshore model is not created)
self.simulation_paths_offshore.append(
self.database.scenarios.get_database_path(
get_input_path=False
).joinpath(
self.name,
"Flooding",
"simulations",
subevent.attrs.name,
self.site.attrs.sfincs.offshore_model,
if self.site.attrs.sfincs.offshore_model is not None:
self.simulation_paths_offshore.append(
self.database.scenarios.get_database_path(
get_input_path=False
).joinpath(
self.name,
"Flooding",
"simulations",
subevent.attrs.name,
self.site.attrs.sfincs.offshore_model,
)
)
)

def has_run_check(self) -> bool:
"""_summary_.
Expand Down Expand Up @@ -413,7 +415,12 @@ def preprocess_sfincs(
model.add_wl_bc(self.wl_ts)

# ASSUMPTION: Order of the rivers is the same as the site.toml file
self.event.add_dis_ts(event_dir=event_dir, site_river=self.site.attrs.river)
if self.site.attrs.river is not None:
self.event.add_dis_ts(
event_dir=event_dir, site_river=self.site.attrs.river
)
else:
self.event.dis_df = None
if self.event.dis_df is not None:
# Generate and change discharge boundary condition
logging.info(
Expand Down Expand Up @@ -596,6 +603,10 @@ def preprocess_sfincs_offshore(self, ds: xr.DataArray, ii: int):
ds (xr.DataArray): DataArray with meteo information (downloaded using event.download_meteo())
ii (int): Iterator for event set
"""
if self.site.attrs.sfincs.offshore_model is None:
raise ValueError(
f"An offshore model needs to be defined in the site.toml with sfincs.offshore_model to run an event of type '{self.event.attrs.template}'"
)
# Determine folders for offshore model
path_in_offshore = self.database.static_path.joinpath(
"templates", self.site.attrs.sfincs.offshore_model
Expand Down Expand Up @@ -686,7 +697,8 @@ def postprocess_sfincs(self):
# Write flood-depth map geotiff
self.write_floodmap_geotiff()
# Write water-level time-series
self.plot_wl_obs()
if self.site.attrs.obs_point is not None:
self.plot_wl_obs()
# Write max water-level netcdf
self.write_water_level_map()
elif self._mode == Mode.risk:
Expand Down Expand Up @@ -805,9 +817,12 @@ def plot_wl_obs(self):
start_time_str=self.event.attrs.time.start_time,
stop_time_str=self.event.attrs.time.end_time,
units=UnitTypesLength(gui_units),
source=self.site.attrs.tide_gauge.source,
file=file,
)
except COOPSAPIError as e:
except (
COOPSAPIError
) as e: # TODO this should be a generic error!
logging.warning(
f"Could not download tide gauge data for station {self.site.attrs.obs_point[ii].ID}. {e}"
)
Expand Down
66 changes: 46 additions & 20 deletions flood_adapt/object_model/interface/site.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
from enum import Enum
from typing import Any, Optional, Union

from pydantic import BaseModel
from pydantic import BaseModel, model_validator

from flood_adapt.object_model.io.unitfulvalue import (
UnitfulDischarge,
Expand Down Expand Up @@ -32,15 +32,21 @@ class Floodmap_type(str, Enum):
water_depth = "water_depth"


class TideGaugeSource(str, Enum):
"""The accepted input for the variable source in tide_gauge."""

file = "file"
noaa_coops = "noaa_coops"


class SfincsModel(BaseModel):
"""The accepted input for the variable sfincs in Site."""

csname: str
cstype: Cstype
version: Optional[str] = ""
offshore_model: str
offshore_model: Optional[str] = None
overland_model: str
ambient_air_pressure: float
floodmap_units: UnitTypesLength
save_simulation: Optional[bool] = False

Expand All @@ -55,7 +61,6 @@ class VerticalReferenceModel(BaseModel):
class WaterLevelReferenceModel(BaseModel):
"""The accepted input for the variable water_level in Site."""

reference: VerticalReferenceModel
localdatum: VerticalReferenceModel
msl: VerticalReferenceModel
other: Optional[list[VerticalReferenceModel]] = [] # only for plotting
Expand All @@ -67,11 +72,18 @@ class Cyclone_track_databaseModel(BaseModel):
file: str


class SlrScenariosModel(BaseModel):
"""The accepted input for the variable slr.scenarios ."""

file: str
relative_to_year: int


class SlrModel(BaseModel):
"""The accepted input for the variable slr in Site."""

vertical_offset: UnitfulLength
relative_to_year: int
scenarios: Optional[SlrScenariosModel] = None


class DamageType(str, Enum):
Expand Down Expand Up @@ -135,10 +147,15 @@ class GuiModel(BaseModel):
class RiskModel(BaseModel):
"""The accepted input for the variable risk in Site."""

flooding_threshold: UnitfulLength
return_periods: list


class FloodFrequencyModel(BaseModel):
"""The accepted input for the variable flood_frequency in Site."""

flooding_threshold: UnitfulLength


class DemModel(BaseModel):
"""The accepted input for the variable dem in Site."""

Expand Down Expand Up @@ -196,22 +213,27 @@ class RiverModel(BaseModel):
y_coordinate: float


class Obs_stationModel(BaseModel):
"""The accepted input for the variable obs_station in Site.
class TideGaugeModel(BaseModel):
"""The accepted input for the variable tide_gauge in Site.

The tide_gauge is used for the download of tide gauge data, to be added to the hazard model as water level boundary condition.
"""

name: Union[int, str]
name: Optional[Union[int, str]] = None
description: Optional[str] = ""
ID: int
source: TideGaugeSource
ID: int # This is the only attribute that is currently used in FA!
file: Optional[str] = None # for locally stored data
lat: float
lon: float
mllw: Optional[UnitfulLength] = None
mhhw: Optional[UnitfulLength] = None
localdatum: Optional[UnitfulLength] = None
msl: Optional[UnitfulLength] = None
lat: Optional[float] = None
lon: Optional[float] = None

@model_validator(mode="after")
def validate_selection_type(self) -> "TideGaugeModel":
if self.source == "file" and self.file is None:
raise ValueError(
"If `source` is 'file' a file path relative to the static folder should be provided with the attribute 'file'."
)
return self


class Obs_pointModel(BaseModel):
Expand Down Expand Up @@ -265,15 +287,19 @@ class SiteModel(BaseModel):
lon: float
sfincs: SfincsModel
water_level: WaterLevelReferenceModel
cyclone_track_database: Cyclone_track_databaseModel
cyclone_track_database: Optional[Cyclone_track_databaseModel] = None
slr: SlrModel
gui: GuiModel
risk: RiskModel
# TODO what should the default be
flood_frequency: Optional[FloodFrequencyModel] = {
"flooding_threshold": UnitfulLength(value=0.0, units="meters")
}
dem: DemModel
fiat: FiatModel
river: Optional[list[RiverModel]] = []
obs_station: Optional[Obs_stationModel] = None
obs_point: Optional[list[Obs_pointModel]] = []
tide_gauge: Optional[TideGaugeModel] = None
river: Optional[list[RiverModel]] = None
obs_point: Optional[list[Obs_pointModel]] = None
benefits: BenefitsModel
scs: Optional[SCSModel] = None # optional for the US to use SCS rainfall curves

Expand Down
Loading
Loading