Merge branch 'master' into reformulate-co2-constraint
fneum authored Jan 18, 2024
2 parents 333e99e + 6d2f2cc commit b0e2365
Showing 13 changed files with 76 additions and 23 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -74,7 +74,7 @@ repos:

# Format Snakemake rule / workflow files
- repo: https://github.com/snakemake/snakefmt
rev: v0.8.5
rev: v0.9.0
hooks:
- id: snakefmt

2 changes: 1 addition & 1 deletion config/config.default.yaml
@@ -376,7 +376,7 @@ sector:
2045: 0.8
2050: 1.0
district_heating_loss: 0.15
cluster_heat_buses: false
cluster_heat_buses: true
bev_dsm_restriction_value: 0.75
bev_dsm_restriction_time: 7
transport_heating_deadband_upper: 20.
11 changes: 11 additions & 0 deletions doc/release_notes.rst
@@ -10,6 +10,8 @@ Release Notes
Upcoming Release
================

* Bugfix: Correct the technology keys for the electricity production plotting so that it works out of the box.

* New configuration option ``everywhere_powerplants`` to build conventional powerplants everywhere in the network, irrespective of existing powerplant locations (https://github.com/PyPSA/pypsa-eur/pull/850).

* Remove option for wave energy as technology data is not maintained.
@@ -18,6 +20,15 @@ Upcoming Release
CO2 atmosphere store. This gives a more sparse constraint that should improve
the performance of the solving process.

* Bugfix: Ensure that the code block which corrects the Norwegian heat demand is actually entered.

* Add a warning when the BEV availability weekly profile has negative values in ``build_transport_demand``.

* The stack trace of uncaught exceptions is now correctly included in log files (via ``configure_logging(..)``).

* Cluster residential and services heat buses by default. Can be disabled with ``cluster_heat_buses: false``.


PyPSA-Eur 0.9.0 (5th January 2024)
==================================

2 changes: 1 addition & 1 deletion rules/retrieve.smk
@@ -191,7 +191,7 @@ if config["enable"]["retrieve"]:
input:
HTTP.remote(
"data.open-power-system-data.org/time_series/{version}/time_series_60min_singleindex.csv".format(
version="2019-06-05"
version="2019-06-05"
if config["snapshots"]["end"] < "2019"
else "2020-10-06"
),
27 changes: 25 additions & 2 deletions scripts/_helpers.py
@@ -80,6 +80,7 @@ def configure_logging(snakemake, skip_handlers=False):
Do (not) skip the default handlers created for redirecting output to STDERR and file.
"""
import logging
import sys

kwargs = snakemake.config.get("logging", dict()).copy()
kwargs.setdefault("level", "INFO")
@@ -103,6 +104,16 @@
)
logging.basicConfig(**kwargs)

    # Set up a handler for uncaught exceptions so they are written to the log files with their stack trace
def handle_exception(exc_type, exc_value, exc_traceback):
# Log the exception
logger = logging.getLogger()
logger.error(
"Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
)

sys.excepthook = handle_exception


def update_p_nom_max(n):
# if extendable carriers (solar/onwind/...) have capacity >= 0,
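
Note: the handler above hooks into Python's sys.excepthook so that crashes still end up in the run's log file. A minimal standalone sketch of the behaviour (the file name and basicConfig setup are made up for illustration; in PyPSA-Eur the real handlers are created by configure_logging):

import logging
import sys

logging.basicConfig(filename="example.log", level=logging.INFO)
logger = logging.getLogger()


def handle_exception(exc_type, exc_value, exc_traceback):
    # logging with exc_info writes the full traceback into the log file
    logger.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback))


sys.excepthook = handle_exception

raise ValueError("this traceback is captured in example.log")
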
@@ -223,7 +234,13 @@ def update_to(b=1, bsize=1, tsize=None):
urllib.request.urlretrieve(url, file, reporthook=update_to)


def mock_snakemake(rulename, root_dir=None, configfiles=[], **wildcards):
def mock_snakemake(
rulename,
root_dir=None,
configfiles=[],
submodule_dir="workflow/submodules/pypsa-eur",
**wildcards,
):
"""
    This function is expected to be executed from the 'scripts'-directory of
the snakemake project. It returns a snakemake.script.Snakemake object,
@@ -239,6 +256,9 @@ def mock_snakemake(rulename, root_dir=None, configfiles=[], **wildcards):
path to the root directory of the snakemake project
configfiles: list, str
list of configfiles to be used to update the config
submodule_dir: str, Path
in case PyPSA-Eur is used as a submodule, submodule_dir is
the path of pypsa-eur relative to the project directory.
**wildcards:
keyword arguments fixing the wildcards. Only necessary if wildcards are
needed.
@@ -257,7 +277,10 @@ def mock_snakemake(rulename, root_dir=None, configfiles=[], **wildcards):
root_dir = Path(root_dir).resolve()

user_in_script_dir = Path.cwd().resolve() == script_dir
if user_in_script_dir:
if str(submodule_dir) in __file__:
        # the submodule_dir path is only needed to locate the project dir
os.chdir(Path(__file__[: __file__.find(str(submodule_dir))]))
elif user_in_script_dir:
os.chdir(root_dir)
elif Path.cwd().resolve() != root_dir:
raise RuntimeError(
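
Note: the new submodule_dir branch simply strips everything from the submodule path onwards to recover the top-level project directory before changing into it. A sketch of the path arithmetic with hypothetical paths:

from pathlib import Path

# hypothetical location of _helpers.py when PyPSA-Eur is vendored as a submodule
file = "/home/user/my-project/workflow/submodules/pypsa-eur/scripts/_helpers.py"
submodule_dir = "workflow/submodules/pypsa-eur"

if submodule_dir in file:
    project_dir = Path(file[: file.find(submodule_dir)])
    print(project_dir)  # /home/user/my-project
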
2 changes: 1 addition & 1 deletion scripts/build_electricity_production.py
@@ -58,7 +58,7 @@
gen = client.query_generation(country, start=start, end=end, nett=True)
gen = gen.tz_localize(None).resample("1h").mean()
gen = gen.loc[start.tz_localize(None) : end.tz_localize(None)]
gen = gen.rename(columns=carrier_grouper).groupby(level=0, axis=1).sum()
gen = gen.rename(columns=carrier_grouper).T.groupby(level=0).sum().T
generation.append(gen)
except NoMatchingDataError:
unavailable_countries.append(country)
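
Note: groupby(..., axis=1) is deprecated in recent pandas, so the column-wise aggregation is rewritten as transpose, row-wise groupby, transpose back. A small sketch with made-up data showing the new form:

import pandas as pd

gen = pd.DataFrame(
    [[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]],
    columns=["Hard Coal", "Brown Coal", "Wind Onshore"],
)
carrier_grouper = {"Hard Coal": "coal", "Brown Coal": "coal", "Wind Onshore": "wind"}

# old, deprecated: gen.rename(columns=carrier_grouper).groupby(level=0, axis=1).sum()
grouped = gen.rename(columns=carrier_grouper).T.groupby(level=0).sum().T
print(grouped)  # columns "coal" and "wind", with coal = Hard Coal + Brown Coal
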
2 changes: 1 addition & 1 deletion scripts/build_energy_totals.py
@@ -479,7 +479,7 @@ def build_energy_totals(countries, eurostat, swiss, idees):
# The main heating source for about 73 per cent of the households is based on electricity
# => 26% is non-electric

if "NO" in df:
if "NO" in df.index:
elec_fraction = 0.73

no_norway = df.drop("NO")
13 changes: 12 additions & 1 deletion scripts/build_transport_demand.py
@@ -8,10 +8,14 @@
availability and demand-side management constraints.
"""

import logging

import numpy as np
import pandas as pd
import xarray as xr
from _helpers import generate_periodic_profiles
from _helpers import configure_logging, generate_periodic_profiles

logger = logging.getLogger(__name__)


def build_nodal_transport_data(fn, pop_layout):
@@ -130,6 +134,12 @@ def bev_availability_profile(fn, snapshots, nodes, options):
traffic.mean() - traffic.min()
)

if not avail[avail < 0].empty:
logger.warning(
"The BEV availability weekly profile has negative values which can "
"lead to infeasibility."
)

return generate_periodic_profiles(
dt_index=snapshots,
nodes=nodes,
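
Note: the emptiness check works because avail is a Series here, so boolean indexing keeps only the offending entries. A toy illustration of the pattern (values are made up):

import pandas as pd

avail = pd.Series([0.9, 0.4, -0.1])  # hypothetical weekly availability values
if not avail[avail < 0].empty:
    print("negative availability -> the warning above would be logged")
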
@@ -160,6 +170,7 @@ def bev_dsm_profile(snapshots, nodes, options):
simpl="",
clusters=48,
)
configure_logging(snakemake)

pop_layout = pd.read_csv(snakemake.input.clustered_pop_layout, index_col=0)

13 changes: 5 additions & 8 deletions scripts/make_summary.py
@@ -521,9 +521,7 @@ def calculate_weighted_prices(n, label, weighted_prices):

if not names.empty:
load += (
n.links_t.p0[names]
.groupby(n.links.loc[names, "bus0"], axis=1)
.sum()
n.links_t.p0[names].T.groupby(n.links.loc[names, "bus0"]).sum().T
)

# Add H2 Store when charging
@@ -563,11 +561,10 @@ def calculate_market_values(n, label, market_values):

dispatch = (
n.generators_t.p[gens]
.groupby(n.generators.loc[gens, "bus"], axis=1)
.T.groupby(n.generators.loc[gens, "bus"])
.sum()
.reindex(columns=buses, fill_value=0.0)
.T.reindex(columns=buses, fill_value=0.0)
)

revenue = dispatch * n.buses_t.marginal_price[buses]

market_values.at[tech, label] = revenue.sum().sum() / dispatch.sum().sum()
@@ -586,9 +583,9 @@ def calculate_market_values(n, label, market_values):

dispatch = (
n.links_t["p" + i][links]
.groupby(n.links.loc[links, "bus" + i], axis=1)
.T.groupby(n.links.loc[links, "bus" + i])
.sum()
.reindex(columns=buses, fill_value=0.0)
.T.reindex(columns=buses, fill_value=0.0)
)

revenue = dispatch * n.buses_t.marginal_price[buses]
9 changes: 5 additions & 4 deletions scripts/plot_network.py
@@ -271,10 +271,11 @@ def plot_h2_map(network, regions):
assign_location(n)

h2_storage = n.stores.query("carrier == 'H2'")
regions["H2"] = h2_storage.rename(
index=h2_storage.bus.map(n.buses.location)
).e_nom_opt.div(
1e6
regions["H2"] = (
h2_storage.rename(index=h2_storage.bus.map(n.buses.location))
.e_nom_opt.groupby(level=0)
.sum()
.div(1e6)
) # TWh
regions["H2"] = regions["H2"].where(regions["H2"] > 0.1)

8 changes: 5 additions & 3 deletions scripts/plot_summary.py
@@ -154,7 +154,7 @@ def plot_costs():

df = df.drop(to_drop)

logger.info(f"Total system cost of {round(df.sum()[0])} EUR billion per year")
logger.info(f"Total system cost of {round(df.sum().iloc[0])} EUR billion per year")

new_index = preferred_order.intersection(df.index).append(
df.index.difference(preferred_order)
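
Note: df.sum() returns a Series, and indexing a Series with [0] relies on the deprecated positional fallback when 0 is not an index label, so the positional access is now spelled out with .iloc[0]. A minimal illustration with made-up data:

import pandas as pd

totals = pd.DataFrame({"costs": [1.2, 3.4]}, index=["solar", "wind"]).sum()
print(totals.iloc[0])  # 4.6, the first entry by position
# totals[0] would fall back to positional indexing, which newer pandas deprecates
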
@@ -214,7 +214,7 @@ def plot_energy():

df = df.drop(to_drop)

logger.info(f"Total energy of {round(df.sum()[0])} TWh/a")
logger.info(f"Total energy of {round(df.sum().iloc[0])} TWh/a")

if df.empty:
fig, ax = plt.subplots(figsize=(12, 8))
@@ -304,7 +304,9 @@ def plot_balances():

df = df.drop(to_drop)

logger.debug(f"Total energy balance for {v} of {round(df.sum()[0],2)} {units}")
logger.debug(
f"Total energy balance for {v} of {round(df.sum().iloc[0],2)} {units}"
)

if df.empty:
continue
6 changes: 6 additions & 0 deletions scripts/plot_validation_electricity_production.py
@@ -45,6 +45,12 @@
header=[0, 1],
parse_dates=True,
)
subset_technologies = ["Geothermal", "Nuclear", "Biomass", "Lignite", "Oil", "Coal"]
lowercase_technologies = [
technology.lower() if technology in subset_technologies else technology
for technology in historic.columns.levels[1]
]
historic.columns = historic.columns.set_levels(lowercase_technologies, level=1)

colors = n.carriers.set_index("nice_name").color.where(
lambda s: s != "", "lightgrey"
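
Note: the historic generation table is read with a two-level column header, and only the technology level is lower-cased, and only for the selected carriers, so those names line up with the casing used for the modelled carriers. A sketch with a made-up two-level column index:

import pandas as pd

columns = pd.MultiIndex.from_product([["DE"], ["Nuclear", "Wind Onshore"]])
historic = pd.DataFrame([[1.0, 2.0]], columns=columns)

subset_technologies = ["Nuclear"]
lowercase = [
    t.lower() if t in subset_technologies else t for t in historic.columns.levels[1]
]
historic.columns = historic.columns.set_levels(lowercase, level=1)
print(historic.columns.tolist())  # [('DE', 'nuclear'), ('DE', 'Wind Onshore')]
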
2 changes: 2 additions & 0 deletions scripts/solve_network.py
@@ -341,6 +341,8 @@ def prepare_network(
for df in (
n.generators_t.p_max_pu,
n.generators_t.p_min_pu,
n.links_t.p_max_pu,
n.links_t.p_min_pu,
n.storage_units_t.inflow,
):
df.where(df > solve_opts["clip_p_max_pu"], other=0.0, inplace=True)
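
Note: adding links_t.p_max_pu and links_t.p_min_pu extends the existing clipping to link availabilities: per-unit values not above the clip_p_max_pu threshold are set to exactly zero. A small sketch of what the where call does (the threshold value is made up):

import pandas as pd

clip_p_max_pu = 1e-2  # hypothetical threshold from solve_opts
p_max_pu = pd.DataFrame({"link1": [0.5, 1e-5], "link2": [0.0, 0.3]})

# values not strictly above the threshold are replaced by 0.0
p_max_pu.where(p_max_pu > clip_p_max_pu, other=0.0, inplace=True)
print(p_max_pu)
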
