From 527c77824c8eba0695327fb60b9cff23df0d3744 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 5 Jan 2023 16:41:49 +0100 Subject: [PATCH 01/64] allow carbon capture in biogas upgrading (closes #49) --- scripts/prepare_sector_network.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b6c052beb..540b3f232 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1874,10 +1874,12 @@ def add_biomass(n, costs): bus0=spatial.gas.biogas, bus1=spatial.gas.nodes, bus2="co2 atmosphere", + bus3="co2 stored", carrier="biogas to gas", capital_cost=costs.loc["biogas upgrading", "fixed"], marginal_cost=costs.loc["biogas upgrading", "VOM"], efficiency2=-costs.at['gas', 'CO2 intensity'], + efficiency3=costs.at["biogas", "CO2 stored"], p_nom_extendable=True ) From 54346b071e07fd7e4f9eed00698a5d3bfc62a678 Mon Sep 17 00:00:00 2001 From: euronion <42553970+euronion@users.noreply.github.com> Date: Sat, 22 Apr 2023 20:33:38 +0200 Subject: [PATCH 02/64] Address pandas deprecation of date_parser for pandas.read_csv --- doc/release_notes.rst | 2 ++ envs/environment.yaml | 2 +- scripts/build_electricity_demand.py | 5 +---- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index f859646bb..7f4dac1ce 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -17,6 +17,8 @@ Upcoming Release * Renamed script file from PyPSA-EUR ``build_load_data`` to ``build_electricity_demand``. +* Adressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now minimum requirement. + PyPSA-Eur 0.8.0 (18th March 2023) ================================= diff --git a/envs/environment.yaml b/envs/environment.yaml index 0a9891a50..baa84f69c 100644 --- a/envs/environment.yaml +++ b/envs/environment.yaml @@ -26,7 +26,7 @@ dependencies: - lxml - powerplantmatching>=0.5.5 - numpy<1.24 -- pandas>=1.4 +- pandas>=2.0 - geopandas>=0.11.0 - xarray - rioxarray diff --git a/scripts/build_electricity_demand.py b/scripts/build_electricity_demand.py index b86b4a5fa..790a958fc 100755 --- a/scripts/build_electricity_demand.py +++ b/scripts/build_electricity_demand.py @@ -80,11 +80,8 @@ def load_timeseries(fn, years, countries, powerstatistics=True): def rename(s): return s[: -len(pattern)] - def date_parser(x): - return dateutil.parser.parse(x, ignoretz=True) - return ( - pd.read_csv(fn, index_col=0, parse_dates=[0], date_parser=date_parser) + pd.read_csv(fn, index_col=0, parse_dates=[0], date_format="%Y-%m-%dT%H:%M:%SZ") .filter(like=pattern) .rename(columns=rename) .dropna(how="all", axis=0) From f4ff3dffc2567dd6cef17f4459bb9a5ef3818d64 Mon Sep 17 00:00:00 2001 From: Fabian Hofmann Date: Mon, 24 Apr 2023 13:09:04 +0200 Subject: [PATCH 03/64] Update doc/release_notes.rst --- doc/release_notes.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 7f4dac1ce..120e8fbd6 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -17,7 +17,7 @@ Upcoming Release * Renamed script file from PyPSA-EUR ``build_load_data`` to ``build_electricity_demand``. -* Adressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now minimum requirement. +* Addressed deprecation warnings for ``pandas=2.0``. ``pandas=2.0`` is now minimum requirement. 
PyPSA-Eur 0.8.0 (18th March 2023) ================================= From cc162a9e028fb7a2bac5289e27b90ab57e46f10b Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 31 Jul 2023 17:09:59 +0200 Subject: [PATCH 04/64] option for losses on bidirectional links via link splitting --- config/config.default.yaml | 5 ++++ scripts/prepare_sector_network.py | 40 +++++++++++++++++++++++++++++++ scripts/solve_network.py | 22 +++++++++++++++++ 3 files changed, 67 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index b162b75dc..4413b8f5d 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -478,6 +478,11 @@ sector: electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 electricity_grid_connection: true + transmission_losses: + # per 1000 km + DC: 0 + H2 pipeline: 0 + gas pipeline: 0 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 11406bffc..8719c2816 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3280,6 +3280,34 @@ def set_temporal_aggregation(n, opts, solver_name): return n +def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): + "Split bidirectional links into two unidirectional links to include transmission losses." + + carrier_i = n.links.query("carrier == @carrier").index + + if not losses_per_thousand_km or carrier_i.empty: + return + + logger.info( + f"Specified losses for {carrier} transmission. Splitting bidirectional links." + ) + + carrier_i = n.links.query("carrier == @carrier").index + n.links.loc[carrier_i, "p_min_pu"] = 0 + n.links["reversed"] = False + n.links.loc[carrier_i, "efficiency"] = ( + 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 + ) + rev_links = ( + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + ) + rev_links.capital_cost = 0 + rev_links.reversed = True + rev_links.index = rev_links.index.map(lambda x: x + "-reversed") + + n.links = pd.concat([n.links, rev_links], sort=False) + + if __name__ == "__main__": if "snakemake" not in globals(): from _helpers import mock_snakemake @@ -3446,6 +3474,18 @@ def set_temporal_aggregation(n, opts, solver_name): if options["electricity_grid_connection"]: add_electricity_grid_connection(n, costs) + for k, v in options["transmission_losses"].items(): + lossy_bidirectional_links(n, k, v) + + # Workaround: Remove lines with conflicting (and unrealistic) properties + # cf. https://github.com/PyPSA/pypsa-eur/issues/444 + if snakemake.config["solving"]["options"]["transmission_losses"]: + idx = n.lines.query("num_parallel == 0").index + logger.info( + f"Removing {len(idx)} line(s) with properties conflicting with transmission losses functionality." 
+ ) + n.mremove("Line", idx) + first_year_myopic = (snakemake.params.foresight == "myopic") and ( snakemake.params.planning_horizons[0] == investment_year ) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 836544b4b..a68ca0748 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -494,6 +494,27 @@ def add_battery_constraints(n): n.model.add_constraints(lhs == 0, name="Link-charger_ratio") +def add_lossy_bidirectional_link_constraints(n): + if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: + return + + carriers = n.links.loc[n.links.reversed, "carrier"].unique() + + backward_i = n.links.query( + "carrier in @carriers and reversed and p_nom_extendable" + ).index + forward_i = n.links.query( + "carrier in @carriers and ~reversed and p_nom_extendable" + ).index + + assert len(forward_i) == len(backward_i) + + lhs = n.model["Link-p_nom"].loc[backward_i] + rhs = n.model["Link-p_nom"].loc[forward_i] + + n.model.add_constraints(lhs == rhs, name="Link-bidirectional_sync") + + def add_chp_constraints(n): electric = ( n.links.index.str.contains("urban central") @@ -593,6 +614,7 @@ def extra_functionality(n, snapshots): if "EQ" in o: add_EQ_constraints(n, o) add_battery_constraints(n) + add_lossy_bidirectional_link_constraints(n) add_pipe_retrofit_constraint(n) From e4eff27e508406055284ba77f4727df7e2dcbc6c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 3 Aug 2023 13:09:12 +0200 Subject: [PATCH 05/64] fix capacity synchronisation between forward and backward lossy links --- scripts/prepare_sector_network.py | 4 ++-- scripts/solve_network.py | 6 +----- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 8719c2816..b8eb8bc1c 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3294,7 +3294,6 @@ def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): carrier_i = n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 - n.links["reversed"] = False n.links.loc[carrier_i, "efficiency"] = ( 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 ) @@ -3302,10 +3301,11 @@ def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links.capital_cost = 0 - rev_links.reversed = True + rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") n.links = pd.concat([n.links, rev_links], sort=False) + n.links["reversed"] = n.links["reversed"].fillna(False) if __name__ == "__main__": diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a68ca0748..5e8c0356c 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -500,14 +500,10 @@ def add_lossy_bidirectional_link_constraints(n): carriers = n.links.loc[n.links.reversed, "carrier"].unique() - backward_i = n.links.query( - "carrier in @carriers and reversed and p_nom_extendable" - ).index forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" ).index - - assert len(forward_i) == len(backward_i) + backward_i = forward_i + "-reversed" lhs = n.model["Link-p_nom"].loc[backward_i] rhs = n.model["Link-p_nom"].loc[forward_i] From d7cb13246b807e7907c49ad1214559be92d2f363 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 7 Aug 2023 14:31:19 +0200 Subject: [PATCH 06/64] link losses: exponential rather than linear model --- 
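[Illustration, not part of the original commit: a minimal standalone sketch of the exponential efficiency model this patch switches to, assuming a plain pandas Series of hypothetical link lengths in km. The patch itself applies the same formula to n.links.loc[carrier_i, "length"], with the forward and "-reversed" copies of each bidirectional link tied together by the Link-p_nom constraint from PATCH 04/05.]

import pandas as pd

# hypothetical link lengths in km (placeholders, not model data)
lengths = pd.Series({"DC link A": 500.0, "DC link B": 1000.0, "DC link C": 2000.0})

efficiency_static = 0.98       # length-independent losses, e.g. the converter pair of a DC link
efficiency_per_1000km = 0.977  # length-dependent losses, compounded per 1000 km

# efficiency = static part * (per-1000km part) ** (length in units of 1000 km)
efficiency = efficiency_static * efficiency_per_1000km ** (lengths / 1e3)
# expected: A ~ 0.969, B ~ 0.957, C ~ 0.935
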
config/config.default.yaml | 13 ++++++++----- scripts/prepare_sector_network.py | 15 ++++++++++----- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 4413b8f5d..1b0a2260c 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -478,11 +478,14 @@ sector: electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 electricity_grid_connection: true - transmission_losses: - # per 1000 km - DC: 0 - H2 pipeline: 0 - gas pipeline: 0 + transmission_efficiency: + DC: + efficiency_static: 0.98 + efficiency_per_1000km: 0.977 + H2 pipeline: + efficiency_per_1000km: 0.979 + gas pipeline: + efficiency_per_1000km: 0.977 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b8eb8bc1c..48f5f41f2 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3280,22 +3280,27 @@ def set_temporal_aggregation(n, opts, solver_name): return n -def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): +def lossy_bidirectional_links(n, carrier, efficiencies={}): "Split bidirectional links into two unidirectional links to include transmission losses." carrier_i = n.links.query("carrier == @carrier").index - if not losses_per_thousand_km or carrier_i.empty: + if not any(v != 1. for v in efficiencies.values()) or carrier_i.empty: return + efficiency_static = efficiencies.get("efficiency_static", 1) + efficiency_per_1000km = efficiencies.get("efficiency_per_1000km", 1) + logger.info( - f"Specified losses for {carrier} transmission. Splitting bidirectional links." + f"Specified losses for {carrier} transmission" + f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km})." + "Splitting bidirectional links." 
) carrier_i = n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 n.links.loc[carrier_i, "efficiency"] = ( - 1 - n.links.loc[carrier_i, "length"] * losses_per_thousand_km / 1e3 + efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) ) rev_links = ( n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) @@ -3474,7 +3479,7 @@ def lossy_bidirectional_links(n, carrier, losses_per_thousand_km=0.0): if options["electricity_grid_connection"]: add_electricity_grid_connection(n, costs) - for k, v in options["transmission_losses"].items(): + for k, v in options["transmission_efficiency"].items(): lossy_bidirectional_links(n, k, v) # Workaround: Remove lines with conflicting (and unrealistic) properties From 118cabe8a60b238ef11aafc980a406011ea9f0fb Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 8 Aug 2023 17:56:22 +0200 Subject: [PATCH 07/64] add option to consider compression losses in pipelines as electricity demand --- config/config.default.yaml | 6 ++++-- scripts/prepare_sector_network.py | 11 ++++++++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 1b0a2260c..81a26a0bb 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -483,9 +483,11 @@ sector: efficiency_static: 0.98 efficiency_per_1000km: 0.977 H2 pipeline: - efficiency_per_1000km: 0.979 + efficiency_per_1000km: 1 # 0.979 + compression_per_1000km: 0.019 gas pipeline: - efficiency_per_1000km: 0.977 + efficiency_per_1000km: 1 #0.977 + compression_per_1000km: 0.01 H2_network: true gas_network: false H2_retrofit: false diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 48f5f41f2..7b58329c6 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3285,11 +3285,16 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): carrier_i = n.links.query("carrier == @carrier").index +<<<<<<< HEAD if not any(v != 1. for v in efficiencies.values()) or carrier_i.empty: +======= + if not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) or carrier_i.empty: +>>>>>>> 5822adb0 (add option to consider compression losses in pipelines as electricity demand) return efficiency_static = efficiencies.get("efficiency_static", 1) efficiency_per_1000km = efficiencies.get("efficiency_per_1000km", 1) + compression_per_1000km = efficiencies.get("compression_per_1000km", 0) logger.info( f"Specified losses for {carrier} transmission" @@ -3297,7 +3302,6 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): "Splitting bidirectional links." 
) - carrier_i = n.links.query("carrier == @carrier").index n.links.loc[carrier_i, "p_min_pu"] = 0 n.links.loc[carrier_i, "efficiency"] = ( efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) @@ -3312,6 +3316,11 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links = pd.concat([n.links, rev_links], sort=False) n.links["reversed"] = n.links["reversed"].fillna(False) + # do compression losses after concatenation to take electricity consumption at bus0 in either direction + carrier_i = n.links.query("carrier == @carrier").index + if compression_per_1000km > 0: + n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map(n.buses.location) # electricity + n.links.loc[carrier_i, "efficiency2"] = - compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 if __name__ == "__main__": if "snakemake" not in globals(): From 592bc4eee7f57ef93e104f266595cb6d8ded754d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 12 Sep 2023 17:28:42 +0200 Subject: [PATCH 08/64] cherry-pick --- scripts/prepare_sector_network.py | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 7b58329c6..de02095d0 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3285,11 +3285,10 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): carrier_i = n.links.query("carrier == @carrier").index -<<<<<<< HEAD - if not any(v != 1. for v in efficiencies.values()) or carrier_i.empty: -======= - if not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) or carrier_i.empty: ->>>>>>> 5822adb0 (add option to consider compression losses in pipelines as electricity demand) + if ( + not any((v != 1.0) or (v >= 0) for v in efficiencies.values()) + or carrier_i.empty + ): return efficiency_static = efficiencies.get("efficiency_static", 1) @@ -3303,8 +3302,10 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): ) n.links.loc[carrier_i, "p_min_pu"] = 0 - n.links.loc[carrier_i, "efficiency"] = ( - efficiency_static * efficiency_per_1000km ** (n.links.loc[carrier_i, "length"] / 1e3) + n.links.loc[ + carrier_i, "efficiency" + ] = efficiency_static * efficiency_per_1000km ** ( + n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) @@ -3319,8 +3320,13 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): # do compression losses after concatenation to take electricity consumption at bus0 in either direction carrier_i = n.links.query("carrier == @carrier").index if compression_per_1000km > 0: - n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map(n.buses.location) # electricity - n.links.loc[carrier_i, "efficiency2"] = - compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + n.links.loc[carrier_i, "bus2"] = n.links.loc[carrier_i, "bus0"].map( + n.buses.location + ) # electricity + n.links.loc[carrier_i, "efficiency2"] = ( + -compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + ) + if __name__ == "__main__": if "snakemake" not in globals(): From 666e79e2fdb7b86348a81e097a0c6e200872b661 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Thu, 10 Aug 2023 17:13:19 +0200 Subject: [PATCH 09/64] improve logging for lossy bidirectional links --- scripts/prepare_sector_network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/prepare_sector_network.py 
b/scripts/prepare_sector_network.py index de02095d0..6355f603e 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3296,8 +3296,8 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): compression_per_1000km = efficiencies.get("compression_per_1000km", 0) logger.info( - f"Specified losses for {carrier} transmission" - f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km})." + f"Specified losses for {carrier} transmission " + f"(static: {efficiency_static}, per 1000km: {efficiency_per_1000km}, compression per 1000km: {compression_per_1000km}). " "Splitting bidirectional links." ) From bde04eeac9dad86b9d05ce6d23f48d98a728ba7f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 29 Aug 2023 16:32:01 +0200 Subject: [PATCH 10/64] lossy_bidirectional_links: set length of reversed lines to 0 to avoid double counting in line volume limit --- scripts/prepare_sector_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 6355f603e..cd5d95709 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3311,6 +3311,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links.capital_cost = 0 + rev_links.length = 0 rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") From 014a4cd62e3bc2f41e9e0ccd8e04ff6c169e9a60 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Sun, 12 Nov 2023 18:42:53 +0100 Subject: [PATCH 11/64] fix for losses with multi-period investment --- scripts/solve_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 832812848..fa59f7a3d 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -697,7 +697,8 @@ def add_lossy_bidirectional_link_constraints(n): if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: return - carriers = n.links.loc[n.links.reversed, "carrier"].unique() + reversed_links = n.links.reversed.fillna(0).astype(bool) + carriers = n.links.loc[reversed_links, "carrier"].unique() forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" From 71985d5e3aa173386c0decb301750f09c5408041 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 29 Dec 2023 12:34:14 +0100 Subject: [PATCH 12/64] validate checksums for zenodo downloads --- doc/release_notes.rst | 3 ++ rules/common.smk | 3 ++ rules/retrieve.smk | 6 ++- scripts/_helpers.py | 59 +++++++++++++++++++++ scripts/retrieve_databundle.py | 4 +- scripts/retrieve_gas_infrastructure_data.py | 4 +- scripts/retrieve_sector_databundle.py | 4 +- 7 files changed, 79 insertions(+), 4 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index d7931f0ef..7b1b6d730 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -54,6 +54,9 @@ Upcoming Release reconnected to the main Ukrainian grid with the configuration option `reconnect_crimea`. +* Validate downloads from Zenodo using MD5 checksums. This identifies corrupted + or incomplete downloads. 
+ **Bugs and Compatibility** diff --git a/rules/common.smk b/rules/common.smk index d34160507..a1537c10e 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -2,6 +2,9 @@ # # SPDX-License-Identifier: MIT +import os, sys +sys.path.insert(0, os.path.abspath("scripts")) +from _helpers import validate_checksum def memory(w): factor = 3.0 diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4fe0cd7b1..e2e634274 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -77,6 +77,7 @@ if config["enable"]["retrieve"] and config["enable"].get("retrieve_cutout", True retries: 2 run: move(input[0], output[0]) + validate_checksum(output[0], input[0]) if config["enable"]["retrieve"] and config["enable"].get("retrieve_cost_data", True): @@ -113,7 +114,7 @@ if config["enable"]["retrieve"] and config["enable"].get( static=True, ), output: - protected(RESOURCES + "natura.tiff"), + RESOURCES + "natura.tiff", log: LOGS + "retrieve_natura_raster.log", resources: @@ -121,6 +122,7 @@ if config["enable"]["retrieve"] and config["enable"].get( retries: 2 run: move(input[0], output[0]) + validate_checksum(output[0], input[0]) if config["enable"]["retrieve"] and config["enable"].get( @@ -226,6 +228,7 @@ if config["enable"]["retrieve"]: retries: 2 run: move(input[0], output[0]) + validate_checksum(output[0], input[0]) if config["enable"]["retrieve"]: @@ -243,6 +246,7 @@ if config["enable"]["retrieve"]: + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif", run: move(input[0], output[0]) + validate_checksum(output[0], input[0]) if config["enable"]["retrieve"]: diff --git a/scripts/_helpers.py b/scripts/_helpers.py index 398f3a30f..d906872d9 100644 --- a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -4,6 +4,7 @@ # SPDX-License-Identifier: MIT import contextlib +import hashlib import logging import os import urllib @@ -11,6 +12,7 @@ import pandas as pd import pytz +import requests import yaml from pypsa.components import component_attrs, components from pypsa.descriptors import Dict @@ -318,3 +320,60 @@ def update_config_with_sector_opts(config, sector_opts): if o.startswith("CF+"): l = o.split("+")[1:] update_config(config, parse(l)) + + +def get_checksum_from_zenodo(file_url): + parts = file_url.split("/") + record_id = parts[parts.index("record") + 1] + filename = parts[-1] + + response = requests.get(f"https://zenodo.org/api/records/{record_id}", timeout=30) + response.raise_for_status() + data = response.json() + + for file in data["files"]: + if file["key"] == filename: + return file["checksum"] + return None + + +def validate_checksum(file_path, zenodo_url=None, checksum=None): + """ + Validate file checksum against provided or Zenodo-retrieved checksum. + Calculates the hash of a file using 64KB chunks. Compares it against a given + checksum or one from a Zenodo URL. + + Parameters + ---------- + file_path : str + Path to the file for checksum validation. + zenodo_url : str, optional + URL of the file on Zenodo to fetch the checksum. + checksum : str, optional + Checksum (format 'hash_type:checksum_value') for validation. + + Raises + ------ + AssertionError + If the checksum does not match, or if neither `checksum` nor `zenodo_url` is provided. + + + Examples + -------- + >>> validate_checksum('/path/to/file', checksum='md5:abc123...') + >>> validate_checksum('/path/to/file', zenodo_url='https://zenodo.org/record/12345/files/example.txt') + + If the checksum is invalid, an AssertionError will be raised. 
+ """ + assert checksum or zenodo_url, "Either checksum or zenodo_url must be provided" + if zenodo_url: + checksum = get_checksum_from_zenodo(zenodo_url) + hash_type, checksum = checksum.split(":") + hasher = hashlib.new(hash_type) + with open(file_path, "rb") as f: + for chunk in iter(lambda: f.read(65536), b""): # 64kb chunks + hasher.update(chunk) + calculated_checksum = hasher.hexdigest() + assert ( + calculated_checksum == checksum + ), "Checksum is invalid. This may be due to an incomplete download. Delete the file and re-execute the rule." diff --git a/scripts/retrieve_databundle.py b/scripts/retrieve_databundle.py index 75d8519e1..25894063c 100644 --- a/scripts/retrieve_databundle.py +++ b/scripts/retrieve_databundle.py @@ -36,7 +36,7 @@ import tarfile from pathlib import Path -from _helpers import configure_logging, progress_retrieve +from _helpers import configure_logging, progress_retrieve, validate_checksum logger = logging.getLogger(__name__) @@ -65,6 +65,8 @@ disable_progress = snakemake.config["run"].get("disable_progressbar", False) progress_retrieve(url, tarball_fn, disable=disable_progress) + validate_checksum(tarball_fn, url) + logger.info("Extracting databundle.") tarfile.open(tarball_fn).extractall(to_fn) diff --git a/scripts/retrieve_gas_infrastructure_data.py b/scripts/retrieve_gas_infrastructure_data.py index 42b726dbd..d984b9feb 100644 --- a/scripts/retrieve_gas_infrastructure_data.py +++ b/scripts/retrieve_gas_infrastructure_data.py @@ -11,7 +11,7 @@ import zipfile from pathlib import Path -from _helpers import progress_retrieve +from _helpers import progress_retrieve, validate_checksum logger = logging.getLogger(__name__) @@ -35,6 +35,8 @@ disable_progress = snakemake.config["run"].get("disable_progressbar", False) progress_retrieve(url, zip_fn, disable=disable_progress) + validate_checksum(zip_fn, url) + logger.info("Extracting databundle.") zipfile.ZipFile(zip_fn).extractall(to_fn) diff --git a/scripts/retrieve_sector_databundle.py b/scripts/retrieve_sector_databundle.py index 0d172c8d1..cb6cc969f 100644 --- a/scripts/retrieve_sector_databundle.py +++ b/scripts/retrieve_sector_databundle.py @@ -13,7 +13,7 @@ import tarfile from pathlib import Path -from _helpers import configure_logging, progress_retrieve +from _helpers import configure_logging, progress_retrieve, validate_checksum if __name__ == "__main__": if "snakemake" not in globals(): @@ -34,6 +34,8 @@ disable_progress = snakemake.config["run"].get("disable_progressbar", False) progress_retrieve(url, tarball_fn, disable=disable_progress) + validate_checksum(tarball_fn, url) + logger.info("Extracting databundle.") tarfile.open(tarball_fn).extractall(to_fn) From de3b6c9573f075d2bdb93ea0af852c8b31ca03b3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 29 Dec 2023 11:38:41 +0000 Subject: [PATCH 13/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- rules/common.smk | 2 ++ scripts/_helpers.py | 11 +++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/rules/common.smk b/rules/common.smk index a1537c10e..2c8cf69c1 100644 --- a/rules/common.smk +++ b/rules/common.smk @@ -3,9 +3,11 @@ # SPDX-License-Identifier: MIT import os, sys + sys.path.insert(0, os.path.abspath("scripts")) from _helpers import validate_checksum + def memory(w): factor = 3.0 for o in w.opts.split("-"): diff --git a/scripts/_helpers.py b/scripts/_helpers.py index d906872d9..9945f70f0 100644 --- 
a/scripts/_helpers.py +++ b/scripts/_helpers.py @@ -340,8 +340,8 @@ def get_checksum_from_zenodo(file_url): def validate_checksum(file_path, zenodo_url=None, checksum=None): """ Validate file checksum against provided or Zenodo-retrieved checksum. - Calculates the hash of a file using 64KB chunks. Compares it against a given - checksum or one from a Zenodo URL. + Calculates the hash of a file using 64KB chunks. Compares it against a + given checksum or one from a Zenodo URL. Parameters ---------- @@ -360,8 +360,11 @@ def validate_checksum(file_path, zenodo_url=None, checksum=None): Examples -------- - >>> validate_checksum('/path/to/file', checksum='md5:abc123...') - >>> validate_checksum('/path/to/file', zenodo_url='https://zenodo.org/record/12345/files/example.txt') + >>> validate_checksum("/path/to/file", checksum="md5:abc123...") + >>> validate_checksum( + ... "/path/to/file", + ... zenodo_url="https://zenodo.org/record/12345/files/example.txt", + ... ) If the checksum is invalid, an AssertionError will be raised. """ From adf2c96dc13deb0b4cb3df077e3c552a3dbcddd1 Mon Sep 17 00:00:00 2001 From: Jess <122939887+jessLryan@users.noreply.github.com> Date: Mon, 1 Jan 2024 12:14:38 +0000 Subject: [PATCH 14/64] Update index.rst fixed 2 broken links --- doc/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/index.rst b/doc/index.rst index d30dd8b97..909a96a2d 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -116,7 +116,7 @@ of the individual parts. topics we are working on. Please feel free to help or make suggestions. This project is currently maintained by the `Department of Digital -Transformation in Energy Systems `_ at the +Transformation in Energy Systems `_ at the `Technische Universität Berlin `_. Previous versions were developed within the `IAI `_ at the `Karlsruhe Institute of Technology (KIT) `_ which was funded by From e3539b0e69cb753e01af5b1c9e3538087b72165e Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 16:31:16 +0100 Subject: [PATCH 15/64] heat vent: add bus --- scripts/prepare_sector_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index d5ca27a7b..329560c7b 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1697,6 +1697,7 @@ def add_heat(n, costs): n.madd( "Generator", nodes[name] + f" {name} heat vent", + bus=nodes[name] + f" {name} heat", location=nodes[name], carrier=name + " heat vent", p_nom_extendable=True, From 30c1a1c857b01e944284d984c540a86e882a2258 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 16:31:48 +0100 Subject: [PATCH 16/64] address deprecation warnings --- config/config.default.yaml | 3 ++ scripts/build_biomass_potentials.py | 2 +- scripts/build_energy_totals.py | 56 ++++++++++++++--------------- scripts/build_line_rating.py | 2 +- scripts/build_retro_cost.py | 5 ++- scripts/build_ship_raster.py | 2 +- scripts/prepare_sector_network.py | 10 +++--- 7 files changed, 41 insertions(+), 39 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index a6df173ba..37664ad64 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -158,6 +158,7 @@ renewable: resource: method: wind turbine: Vestas_V112_3MW + add_cutout_windspeed: true capacity_per_sqkm: 3 # correction_factor: 0.93 corine: @@ -173,6 +174,7 @@ renewable: resource: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore + add_cutout_windspeed: true capacity_per_sqkm: 2 
correction_factor: 0.8855 corine: [44, 255] @@ -188,6 +190,7 @@ renewable: resource: method: wind turbine: NREL_ReferenceTurbine_5MW_offshore + add_cutout_windspeed: true capacity_per_sqkm: 2 correction_factor: 0.8855 corine: [44, 255] diff --git a/scripts/build_biomass_potentials.py b/scripts/build_biomass_potentials.py index d7c467cf6..aae1fb989 100644 --- a/scripts/build_biomass_potentials.py +++ b/scripts/build_biomass_potentials.py @@ -134,7 +134,7 @@ def disaggregate_nuts0(bio): # get population in nuts2 pop_nuts2 = pop.loc[pop.index.str.len() == 4] by_country = pop_nuts2.total.groupby(pop_nuts2.ct).sum() - pop_nuts2["fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country) + pop_nuts2.loc[:, "fraction"] = pop_nuts2.total / pop_nuts2.ct.map(by_country) # distribute nuts0 data to nuts2 by population bio_nodal = bio.loc[pop_nuts2.ct] diff --git a/scripts/build_energy_totals.py b/scripts/build_energy_totals.py index 6f9585c1d..67b864663 100644 --- a/scripts/build_energy_totals.py +++ b/scripts/build_energy_totals.py @@ -189,12 +189,12 @@ def idees_per_country(ct, year, base_dir): ct_totals["total residential water"] = df.at["Water heating"] assert df.index[23] == "Electricity" - ct_totals["electricity residential water"] = df[23] + ct_totals["electricity residential water"] = df.iloc[23] ct_totals["total residential cooking"] = df["Cooking"] assert df.index[30] == "Electricity" - ct_totals["electricity residential cooking"] = df[30] + ct_totals["electricity residential cooking"] = df.iloc[30] df = pd.read_excel(fn_residential, "RES_summary", index_col=0)[year] @@ -202,13 +202,13 @@ def idees_per_country(ct, year, base_dir): ct_totals["total residential"] = df[row] assert df.index[47] == "Electricity" - ct_totals["electricity residential"] = df[47] + ct_totals["electricity residential"] = df.iloc[47] assert df.index[46] == "Derived heat" - ct_totals["derived heat residential"] = df[46] + ct_totals["derived heat residential"] = df.iloc[46] assert df.index[50] == "Thermal uses" - ct_totals["thermal uses residential"] = df[50] + ct_totals["thermal uses residential"] = df.iloc[50] # services @@ -222,12 +222,12 @@ def idees_per_country(ct, year, base_dir): ct_totals["total services water"] = df["Hot water"] assert df.index[24] == "Electricity" - ct_totals["electricity services water"] = df[24] + ct_totals["electricity services water"] = df.iloc[24] ct_totals["total services cooking"] = df["Catering"] assert df.index[31] == "Electricity" - ct_totals["electricity services cooking"] = df[31] + ct_totals["electricity services cooking"] = df.iloc[31] df = pd.read_excel(fn_tertiary, "SER_summary", index_col=0)[year] @@ -235,13 +235,13 @@ def idees_per_country(ct, year, base_dir): ct_totals["total services"] = df[row] assert df.index[50] == "Electricity" - ct_totals["electricity services"] = df[50] + ct_totals["electricity services"] = df.iloc[50] assert df.index[49] == "Derived heat" - ct_totals["derived heat services"] = df[49] + ct_totals["derived heat services"] = df.iloc[49] assert df.index[53] == "Thermal uses" - ct_totals["thermal uses services"] = df[53] + ct_totals["thermal uses services"] = df.iloc[53] # agriculture, forestry and fishing @@ -282,28 +282,28 @@ def idees_per_country(ct, year, base_dir): ct_totals["total two-wheel"] = df["Powered 2-wheelers (Gasoline)"] assert df.index[19] == "Passenger cars" - ct_totals["total passenger cars"] = df[19] + ct_totals["total passenger cars"] = df.iloc[19] assert df.index[30] == "Battery electric vehicles" - ct_totals["electricity passenger 
cars"] = df[30] + ct_totals["electricity passenger cars"] = df.iloc[30] assert df.index[31] == "Motor coaches, buses and trolley buses" - ct_totals["total other road passenger"] = df[31] + ct_totals["total other road passenger"] = df.iloc[31] assert df.index[39] == "Battery electric vehicles" - ct_totals["electricity other road passenger"] = df[39] + ct_totals["electricity other road passenger"] = df.iloc[39] assert df.index[41] == "Light duty vehicles" - ct_totals["total light duty road freight"] = df[41] + ct_totals["total light duty road freight"] = df.iloc[41] assert df.index[49] == "Battery electric vehicles" - ct_totals["electricity light duty road freight"] = df[49] + ct_totals["electricity light duty road freight"] = df.iloc[49] row = "Heavy duty vehicles (Diesel oil incl. biofuels)" ct_totals["total heavy duty road freight"] = df[row] assert df.index[61] == "Passenger cars" - ct_totals["passenger car efficiency"] = df[61] + ct_totals["passenger car efficiency"] = df.iloc[61] df = pd.read_excel(fn_transport, "TrRail_ene", index_col=0)[year] @@ -312,39 +312,39 @@ def idees_per_country(ct, year, base_dir): ct_totals["electricity rail"] = df["Electricity"] assert df.index[15] == "Passenger transport" - ct_totals["total rail passenger"] = df[15] + ct_totals["total rail passenger"] = df.iloc[15] assert df.index[16] == "Metro and tram, urban light rail" assert df.index[19] == "Electric" assert df.index[20] == "High speed passenger trains" - ct_totals["electricity rail passenger"] = df[[16, 19, 20]].sum() + ct_totals["electricity rail passenger"] = df.iloc[[16, 19, 20]].sum() assert df.index[21] == "Freight transport" - ct_totals["total rail freight"] = df[21] + ct_totals["total rail freight"] = df.iloc[21] assert df.index[23] == "Electric" - ct_totals["electricity rail freight"] = df[23] + ct_totals["electricity rail freight"] = df.iloc[23] df = pd.read_excel(fn_transport, "TrAvia_ene", index_col=0)[year] assert df.index[6] == "Passenger transport" - ct_totals["total aviation passenger"] = df[6] + ct_totals["total aviation passenger"] = df.iloc[6] assert df.index[10] == "Freight transport" - ct_totals["total aviation freight"] = df[10] + ct_totals["total aviation freight"] = df.iloc[10] assert df.index[7] == "Domestic" - ct_totals["total domestic aviation passenger"] = df[7] + ct_totals["total domestic aviation passenger"] = df.iloc[7] assert df.index[8] == "International - Intra-EU" assert df.index[9] == "International - Extra-EU" - ct_totals["total international aviation passenger"] = df[[8, 9]].sum() + ct_totals["total international aviation passenger"] = df.iloc[[8, 9]].sum() assert df.index[11] == "Domestic and International - Intra-EU" - ct_totals["total domestic aviation freight"] = df[11] + ct_totals["total domestic aviation freight"] = df.iloc[11] assert df.index[12] == "International - Extra-EU" - ct_totals["total international aviation freight"] = df[12] + ct_totals["total international aviation freight"] = df.iloc[12] ct_totals["total domestic aviation"] = ( ct_totals["total domestic aviation freight"] @@ -364,7 +364,7 @@ def idees_per_country(ct, year, base_dir): df = pd.read_excel(fn_transport, "TrRoad_act", index_col=0)[year] assert df.index[85] == "Passenger cars" - ct_totals["passenger cars"] = df[85] + ct_totals["passenger cars"] = df.iloc[85] return pd.Series(ct_totals, name=ct) diff --git a/scripts/build_line_rating.py b/scripts/build_line_rating.py index 032ba39cf..c53d2899b 100755 --- a/scripts/build_line_rating.py +++ b/scripts/build_line_rating.py @@ -119,7 +119,7 
@@ def calculate_line_rating(n, cutout): .apply(lambda x: int(re.findall(r"(\d+)-bundle", x)[0])) ) # Set default number of bundles per line - relevant_lines["n_bundle"].fillna(1, inplace=True) + relevant_lines["n_bundle"] = relevant_lines["n_bundle"].fillna(1) R *= relevant_lines["n_bundle"] R = calculate_resistance(T=353, R_ref=R) Imax = cutout.line_rating(shapes, R, D=0.0218, Ts=353, epsilon=0.8, alpha=0.8) diff --git a/scripts/build_retro_cost.py b/scripts/build_retro_cost.py index f5313c21f..03c46651a 100644 --- a/scripts/build_retro_cost.py +++ b/scripts/build_retro_cost.py @@ -836,8 +836,7 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor) F_red_temp = map_to_lstrength(l_strength, F_red_temp) Q_ht = ( - heat_transfer_perm2.groupby(level=1, axis=1) - .sum() + heat_transfer_perm2.T.groupby(level=1).sum().T .mul(F_red_temp.droplevel(0, axis=1)) .mul(temperature_factor.reindex(heat_transfer_perm2.index, level=0), axis=0) ) @@ -878,7 +877,7 @@ def calculate_gain_utilisation_factor(heat_transfer_perm2, Q_ht, Q_gain): Calculates gain utilisation factor nu. """ # time constant of the building tau [h] = c_m [Wh/(m^2K)] * 1 /(H_tr_e+H_tb*H_ve) [m^2 K /W] - tau = c_m / heat_transfer_perm2.groupby(level=1, axis=1).sum() + tau = c_m / heat_transfer_perm2.T.groupby(axis=1).sum().T alpha = alpha_H_0 + (tau / tau_H_0) # heat balance ratio gamma = (1 / Q_ht).mul(Q_gain.sum(axis=1), axis=0) diff --git a/scripts/build_ship_raster.py b/scripts/build_ship_raster.py index 90e006b0b..02f4d5d5d 100644 --- a/scripts/build_ship_raster.py +++ b/scripts/build_ship_raster.py @@ -64,7 +64,7 @@ with zipfile.ZipFile(snakemake.input.ship_density) as zip_f: zip_f.extract("shipdensity_global.tif") with rioxarray.open_rasterio("shipdensity_global.tif") as ship_density: - ship_density = ship_density.drop(["band"]).sel( + ship_density = ship_density.drop_vars(["band"]).sel( x=slice(min(xs), max(Xs)), y=slice(max(Ys), min(ys)) ) ship_density.rio.to_raster(snakemake.output[0]) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 329560c7b..b3a706d84 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1630,7 +1630,7 @@ def build_heat_demand(n): electric_nodes = n.loads.index[n.loads.carrier == "electricity"] n.loads_t.p_set[electric_nodes] = ( n.loads_t.p_set[electric_nodes] - - electric_heat_supply.groupby(level=1, axis=1).sum()[electric_nodes] + - electric_heat_supply.T.groupby(level=1).sum().T[electric_nodes] ) return heat_demand @@ -1724,15 +1724,15 @@ def add_heat(n, costs): if sector in name: heat_load = ( heat_demand[[sector + " water", sector + " space"]] - .groupby(level=1, axis=1) - .sum()[nodes[name]] + .T.groupby(level=1) + .sum().T[nodes[name]] .multiply(factor) ) if name == "urban central": heat_load = ( - heat_demand.groupby(level=1, axis=1) - .sum()[nodes[name]] + heat_demand.T.groupby(level=1) + .sum().T[nodes[name]] .multiply( factor * (1 + options["district_heating"]["district_heating_loss"]) ) From e580ac85d962e0ef9d24716125655a0e59712f8e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 15:33:08 +0000 Subject: [PATCH 17/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_retro_cost.py | 5 +++-- scripts/prepare_sector_network.py | 6 ++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/scripts/build_retro_cost.py 
b/scripts/build_retro_cost.py index 03c46651a..3ca2b1741 100644 --- a/scripts/build_retro_cost.py +++ b/scripts/build_retro_cost.py @@ -836,8 +836,9 @@ def calculate_heat_losses(u_values, data_tabula, l_strength, temperature_factor) F_red_temp = map_to_lstrength(l_strength, F_red_temp) Q_ht = ( - heat_transfer_perm2.T.groupby(level=1).sum().T - .mul(F_red_temp.droplevel(0, axis=1)) + heat_transfer_perm2.T.groupby(level=1) + .sum() + .T.mul(F_red_temp.droplevel(0, axis=1)) .mul(temperature_factor.reindex(heat_transfer_perm2.index, level=0), axis=0) ) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index b3a706d84..2480754ca 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1725,14 +1725,16 @@ def add_heat(n, costs): heat_load = ( heat_demand[[sector + " water", sector + " space"]] .T.groupby(level=1) - .sum().T[nodes[name]] + .sum() + .T[nodes[name]] .multiply(factor) ) if name == "urban central": heat_load = ( heat_demand.T.groupby(level=1) - .sum().T[nodes[name]] + .sum() + .T[nodes[name]] .multiply( factor * (1 + options["district_heating"]["district_heating_loss"]) ) From f2a636c62cbc3dca93eaf7df7ad6686012f1e8da Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:12:49 +0100 Subject: [PATCH 18/64] bugfix: correct unit of capital_cost of Haber-Bosch --- scripts/prepare_sector_network.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 2480754ca..ac0b618b8 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -796,6 +796,8 @@ def add_ammonia(n, costs): "Bus", spatial.ammonia.nodes, location=spatial.ammonia.locations, carrier="NH3" ) + MWh_elec_per_MWh_NH3 = cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + n.madd( "Link", nodes, @@ -805,14 +807,10 @@ def add_ammonia(n, costs): bus2=nodes + " H2", p_nom_extendable=True, carrier="Haber-Bosch", - efficiency=1 - / ( - cf_industry["MWh_elec_per_tNH3_electrolysis"] - / cf_industry["MWh_NH3_per_tNH3"] - ), # output: MW_NH3 per MW_elec + efficiency=1 / MWh_elec_per_MWh_NH3, efficiency2=-cf_industry["MWh_H2_per_tNH3_electrolysis"] / cf_industry["MWh_elec_per_tNH3_electrolysis"], # input: MW_H2 per MW_elec - capital_cost=costs.at["Haber-Bosch", "fixed"], + capital_cost=costs.at["Haber-Bosch", "fixed"] / MWh_elec_per_MWh_NH3, lifetime=costs.at["Haber-Bosch", "lifetime"], ) From 2678fdef993eb2c0d1b325c4e260040ed2e19174 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 17:13:34 +0000 Subject: [PATCH 19/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index ac0b618b8..1dc2b3ef1 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -796,7 +796,9 @@ def add_ammonia(n, costs): "Bus", spatial.ammonia.nodes, location=spatial.ammonia.locations, carrier="NH3" ) - MWh_elec_per_MWh_NH3 = cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + MWh_elec_per_MWh_NH3 = ( + cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + ) n.madd( "Link", From 0720ccb00d268c7f81fed419a313cb8e2c5d6924 Mon Sep 17 00:00:00 
2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:14:11 +0100 Subject: [PATCH 20/64] add release note --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index a3659b9bf..c319bce92 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -38,6 +38,8 @@ Upcoming Release * Split configuration to enable SMR and SMR CC. +* Bugfix: The unit of the capital cost of Haber-Bosch plants was corrected. + * The configuration setting for country focus weights when clustering the network has been moved from ``focus_weights:`` to ``clustering: focus_weights:``. Backwards compatibility to old config files is maintained. From fd81058008b3532ecba0145e60541bea5e08f343 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:35:10 +0100 Subject: [PATCH 21/64] add VOM of PtX processes (closes #747) --- doc/release_notes.rst | 2 ++ scripts/prepare_sector_network.py | 20 +++++++++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c319bce92..494abde1c 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -44,6 +44,8 @@ Upcoming Release network has been moved from ``focus_weights:`` to ``clustering: focus_weights:``. Backwards compatibility to old config files is maintained. +* Add VOM as marginal cost to PtX processes. + * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. * Merged option to extend geographical scope to Ukraine and Moldova. These diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 1dc2b3ef1..8620d2401 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -813,6 +813,7 @@ def add_ammonia(n, costs): efficiency2=-cf_industry["MWh_H2_per_tNH3_electrolysis"] / cf_industry["MWh_elec_per_tNH3_electrolysis"], # input: MW_H2 per MW_elec capital_cost=costs.at["Haber-Bosch", "fixed"] / MWh_elec_per_MWh_NH3, + marginal_cost=costs.at["Haber-Bosch", "VOM"] / MWh_elec_per_MWh_NH3, lifetime=costs.at["Haber-Bosch", "lifetime"], ) @@ -1023,7 +1024,7 @@ def insert_gas_distribution_costs(n, costs): f"Inserting gas distribution grid with investment cost factor of {f_costs}" ) - capital_cost = costs.loc["electricity distribution grid"]["fixed"] * f_costs + capital_cost = costs.at["electricity distribution grid", "fixed"] * f_costs # gas boilers gas_b = n.links.index[ @@ -1100,6 +1101,7 @@ def add_storage_and_grids(n, costs): efficiency=costs.at["OCGT", "efficiency"], capital_cost=costs.at["OCGT", "fixed"] * costs.at["OCGT", "efficiency"], # NB: fixed cost is per MWel + marginal_cost=costs.at["OCGT", "VOM"], lifetime=costs.at["OCGT", "lifetime"], ) @@ -2168,8 +2170,8 @@ def add_biomass(n, costs): bus1=spatial.gas.nodes, bus2="co2 atmosphere", carrier="biogas to gas", - capital_cost=costs.loc["biogas upgrading", "fixed"], - marginal_cost=costs.loc["biogas upgrading", "VOM"], + capital_cost=costs.at["biogas upgrading", "fixed"], + marginal_cost=costs.at["biogas upgrading", "VOM"], efficiency2=-costs.at["gas", "CO2 intensity"], p_nom_extendable=True, ) @@ -2318,7 +2320,7 @@ def add_biomass(n, costs): + costs.at["BtL", "CO2 stored"], p_nom_extendable=True, capital_cost=costs.at["BtL", "fixed"], - marginal_cost=costs.at["BtL", "efficiency"] * costs.loc["BtL", "VOM"], + marginal_cost=costs.at["BtL", "efficiency"] * costs.at["BtL", "VOM"], ) # TODO: Update with energy penalty @@ -2339,7 +2341,7 @@ 
def add_biomass(n, costs): p_nom_extendable=True, capital_cost=costs.at["BtL", "fixed"] + costs.at["biomass CHP capture", "fixed"] * costs.at["BtL", "CO2 stored"], - marginal_cost=costs.at["BtL", "efficiency"] * costs.loc["BtL", "VOM"], + marginal_cost=costs.at["BtL", "efficiency"] * costs.at["BtL", "VOM"], ) # BioSNG from solid biomass @@ -2358,7 +2360,7 @@ def add_biomass(n, costs): + costs.at["BioSNG", "CO2 stored"], p_nom_extendable=True, capital_cost=costs.at["BioSNG", "fixed"], - marginal_cost=costs.at["BioSNG", "efficiency"] * costs.loc["BioSNG", "VOM"], + marginal_cost=costs.at["BioSNG", "efficiency"] * costs.at["BioSNG", "VOM"], ) # TODO: Update with energy penalty for CC @@ -2382,7 +2384,7 @@ def add_biomass(n, costs): capital_cost=costs.at["BioSNG", "fixed"] + costs.at["biomass CHP capture", "fixed"] * costs.at["BioSNG", "CO2 stored"], - marginal_cost=costs.at["BioSNG", "efficiency"] * costs.loc["BioSNG", "VOM"], + marginal_cost=costs.at["BioSNG", "efficiency"] * costs.at["BioSNG", "VOM"], ) @@ -2615,6 +2617,8 @@ def add_industry(n, costs): p_min_pu=options.get("min_part_load_methanolisation", 0), capital_cost=costs.at["methanolisation", "fixed"] * options["MWh_MeOH_per_MWh_H2"], # EUR/MW_H2/a + marginal_cost=options["MWh_MeOH_per_MWh_H2"] + * costs.at["methanolisation", "VOM"], lifetime=costs.at["methanolisation", "lifetime"], efficiency=options["MWh_MeOH_per_MWh_H2"], efficiency2=-options["MWh_MeOH_per_MWh_H2"] / options["MWh_MeOH_per_MWh_e"], @@ -2732,6 +2736,8 @@ def add_industry(n, costs): efficiency=costs.at["Fischer-Tropsch", "efficiency"], capital_cost=costs.at["Fischer-Tropsch", "fixed"] * costs.at["Fischer-Tropsch", "efficiency"], # EUR/MW_H2/a + marginal_cost=costs.at["Fischer-Tropsch", "efficiency"] + * costs.at["Fischer-Tropsch", "VOM"], efficiency2=-costs.at["oil", "CO2 intensity"] * costs.at["Fischer-Tropsch", "efficiency"], p_nom_extendable=True, From 6714858e177ca9a040862906e9b326ce22ecca6a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 17:47:46 +0000 Subject: [PATCH 22/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 139e48367..62bca811e 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2163,7 +2163,6 @@ def add_biomass(n, costs): e_initial=solid_biomass_potentials_spatial, ) - n.madd( "Link", spatial.gas.biogas_to_gas, @@ -2178,10 +2177,9 @@ def add_biomass(n, costs): efficiency=costs.at["biogas", "efficiency"], efficiency2=-costs.at["gas", "CO2 intensity"], efficiency3=costs.at["biogas", "CO2 stored"], - p_nom_extendable=True + p_nom_extendable=True, ) - if options.get("biomass_upgrading_cc"): # Assuming for costs that the CO2 from upgrading is pure, such as in amine scrubbing. I.e., with and without CC is # equivalent. 
Adding biomass CHP capture because biogas is often small-scale and decentral so further From 9884dee7a0737aa2eca00e60828659b02fa0cecc Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:48:52 +0100 Subject: [PATCH 23/64] remove biogas upgrading CC in normal link --- scripts/prepare_sector_network.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 139e48367..1d6e4ab0d 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2170,14 +2170,12 @@ def add_biomass(n, costs): bus0=spatial.gas.biogas, bus1=spatial.gas.nodes, bus2="co2 atmosphere", - bus3="co2 stored", carrier="biogas to gas", capital_cost=costs.at["biogas", "fixed"] + costs.at["biogas upgrading", "fixed"], marginal_cost=costs.at["biogas upgrading", "VOM"], efficiency=costs.at["biogas", "efficiency"], efficiency2=-costs.at["gas", "CO2 intensity"], - efficiency3=costs.at["biogas", "CO2 stored"], p_nom_extendable=True ) From b0cca00b7088767e46806dcebd64800d73247d42 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:51:03 +0100 Subject: [PATCH 24/64] add documentation and release note for biogas upgrading CC --- config/config.default.yaml | 2 ++ doc/configtables/sector.csv | 1 + doc/release_notes.rst | 2 ++ 3 files changed, 5 insertions(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index 37664ad64..dd36f1d8c 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -497,6 +497,7 @@ sector: gas_distribution_grid_cost_factor: 1.0 biomass_spatial: false biomass_transport: false + biomass_upgrading_cc: false conventional_generation: OCGT: gas biomass_to_liquid: false @@ -778,6 +779,7 @@ plotting: fossil gas: '#e05b09' natural gas: '#e05b09' biogas to gas: '#e36311' + biogas to gas CC: '#e51245' CCGT: '#a85522' CCGT marginal: '#a85522' allam: '#B98F76' diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 856ea0749..890b448c8 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -118,6 +118,7 @@ gas_distribution_grid _cost_factor,,,Multiplier for the investment cost of the g ,,, biomass_spatial,--,"{true, false}",Add option for resolving biomass demand regionally biomass_transport,--,"{true, false}",Add option for transporting solid biomass between nodes +biomass_upgrading_cc,--,"{true, false}",Add option to capture CO2 from biomass upgrading conventional_generation,,,Add a more detailed description of conventional carriers. Any power generation requires the consumption of fuel from nodes representing that fuel. biomass_to_liquid,--,"{true, false}",Add option for transforming solid biomass into liquid fuel with the same properties as oil biosng,--,"{true, false}",Add option for transforming solid biomass into synthesis gas with the same properties as natural gas diff --git a/doc/release_notes.rst b/doc/release_notes.rst index c319bce92..cfb67d771 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -46,6 +46,8 @@ Upcoming Release * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. +* Add option to capture CO2 contained in biogas when upgrading (``sector: biogas_to_gas_cc``). + * Merged option to extend geographical scope to Ukraine and Moldova. These countries are excluded by default and is currently constrained to power-sector only parts of the workflow. 
A special config file From a10a60b95139f1cead8158607fc12cb0b5c5b069 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 18:52:22 +0100 Subject: [PATCH 25/64] rename setting from biomass_upgrading_cc to biogas_upgrading_cc --- config/config.default.yaml | 2 +- doc/configtables/sector.csv | 2 +- scripts/prepare_sector_network.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index dd36f1d8c..7bfd3f019 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -497,7 +497,7 @@ sector: gas_distribution_grid_cost_factor: 1.0 biomass_spatial: false biomass_transport: false - biomass_upgrading_cc: false + biogas_upgrading_cc: false conventional_generation: OCGT: gas biomass_to_liquid: false diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 890b448c8..280c19064 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -118,7 +118,7 @@ gas_distribution_grid _cost_factor,,,Multiplier for the investment cost of the g ,,, biomass_spatial,--,"{true, false}",Add option for resolving biomass demand regionally biomass_transport,--,"{true, false}",Add option for transporting solid biomass between nodes -biomass_upgrading_cc,--,"{true, false}",Add option to capture CO2 from biomass upgrading +biogas_upgrading_cc,--,"{true, false}",Add option to capture CO2 from biomass upgrading conventional_generation,,,Add a more detailed description of conventional carriers. Any power generation requires the consumption of fuel from nodes representing that fuel. biomass_to_liquid,--,"{true, false}",Add option for transforming solid biomass into liquid fuel with the same properties as oil biosng,--,"{true, false}",Add option for transforming solid biomass into synthesis gas with the same properties as natural gas diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 2f4ce271a..aaaf37730 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2178,7 +2178,7 @@ def add_biomass(n, costs): p_nom_extendable=True, ) - if options.get("biomass_upgrading_cc"): + if options.get("biogas_upgrading_cc"): # Assuming for costs that the CO2 from upgrading is pure, such as in amine scrubbing. I.e., with and without CC is # equivalent. Adding biomass CHP capture because biogas is often small-scale and decentral so further # from e.g. CO2 grid or buyers. This is a proxy for the added cost for e.g. 
a raw biogas pipeline to a central upgrading facility From 5e4a81f82896485dcaa850394449a19ec194e852 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:04:34 +0100 Subject: [PATCH 26/64] haber-bosch: use DECHEMA source for hydrogen input --- config/config.default.yaml | 2 +- scripts/prepare_sector_network.py | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 37664ad64..97efa555e 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -547,7 +547,7 @@ industry: MWh_NH3_per_tNH3: 5.166 MWh_CH4_per_tNH3_SMR: 10.8 MWh_elec_per_tNH3_SMR: 0.7 - MWh_H2_per_tNH3_electrolysis: 6.5 + MWh_H2_per_tNH3_electrolysis: 5.93 MWh_elec_per_tNH3_electrolysis: 1.17 MWh_NH3_per_MWh_H2_cracker: 1.46 # https://github.com/euronion/trace/blob/44a5ff8401762edbef80eff9cfe5a47c8d3c8be4/data/efficiencies.csv NH3_process_emissions: 24.5 diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 1dc2b3ef1..442bc5643 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -810,8 +810,7 @@ def add_ammonia(n, costs): p_nom_extendable=True, carrier="Haber-Bosch", efficiency=1 / MWh_elec_per_MWh_NH3, - efficiency2=-cf_industry["MWh_H2_per_tNH3_electrolysis"] - / cf_industry["MWh_elec_per_tNH3_electrolysis"], # input: MW_H2 per MW_elec + efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] / MWh_elec_per_MWh_NH3, capital_cost=costs.at["Haber-Bosch", "fixed"] / MWh_elec_per_MWh_NH3, lifetime=costs.at["Haber-Bosch", "lifetime"], ) From 438b40cdb1bcdfe484db3585364b10cf8e4faca7 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:09:46 +0100 Subject: [PATCH 27/64] haber-bosch: use DECHEMA source for electricity input --- config/config.default.yaml | 2 +- scripts/prepare_sector_network.py | 10 +++------- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 97efa555e..31858a5ba 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -548,7 +548,7 @@ industry: MWh_CH4_per_tNH3_SMR: 10.8 MWh_elec_per_tNH3_SMR: 0.7 MWh_H2_per_tNH3_electrolysis: 5.93 - MWh_elec_per_tNH3_electrolysis: 1.17 + MWh_elec_per_tNH3_electrolysis: 0.2473 MWh_NH3_per_MWh_H2_cracker: 1.46 # https://github.com/euronion/trace/blob/44a5ff8401762edbef80eff9cfe5a47c8d3c8be4/data/efficiencies.csv NH3_process_emissions: 24.5 petrochemical_process_emissions: 25.5 diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 442bc5643..d23143ff9 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -796,10 +796,6 @@ def add_ammonia(n, costs): "Bus", spatial.ammonia.nodes, location=spatial.ammonia.locations, carrier="NH3" ) - MWh_elec_per_MWh_NH3 = ( - cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] - ) - n.madd( "Link", nodes, @@ -809,9 +805,9 @@ def add_ammonia(n, costs): bus2=nodes + " H2", p_nom_extendable=True, carrier="Haber-Bosch", - efficiency=1 / MWh_elec_per_MWh_NH3, - efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] / MWh_elec_per_MWh_NH3, - capital_cost=costs.at["Haber-Bosch", "fixed"] / MWh_elec_per_MWh_NH3, + efficiency=1 / costs.at["Haber-Bosch", "electricity-input"], + efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] / costs.at["Haber-Bosch", "electricity-input"], + capital_cost=costs.at["Haber-Bosch", "fixed"] / costs.at["Haber-Bosch", "electricity-input"], 
lifetime=costs.at["Haber-Bosch", "lifetime"], ) From 815b8283115b427189f7fe364c6ca1070427e2d8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:11:42 +0000 Subject: [PATCH 28/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 49d267266..e9d97ade0 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -808,9 +808,12 @@ def add_ammonia(n, costs): p_nom_extendable=True, carrier="Haber-Bosch", efficiency=1 / costs.at["Haber-Bosch", "electricity-input"], - efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] / costs.at["Haber-Bosch", "electricity-input"], - capital_cost=costs.at["Haber-Bosch", "fixed"] / costs.at["Haber-Bosch", "electricity-input"], - marginal_cost=costs.at["Haber-Bosch", "VOM"] / costs.at["Haber-Bosch", "electricity-input"], + efficiency2=-costs.at["Haber-Bosch", "hydrogen-input"] + / costs.at["Haber-Bosch", "electricity-input"], + capital_cost=costs.at["Haber-Bosch", "fixed"] + / costs.at["Haber-Bosch", "electricity-input"], + marginal_cost=costs.at["Haber-Bosch", "VOM"] + / costs.at["Haber-Bosch", "electricity-input"], lifetime=costs.at["Haber-Bosch", "lifetime"], ) From ebc25fbf61b469de27c3b0e69bed336d7f3b167e Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:12:41 +0100 Subject: [PATCH 29/64] add release note --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index b7035974d..634209c7f 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -48,6 +48,8 @@ Upcoming Release * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. +* Switch to using hydrogen and electricity inputs for Haber-Bosch from https://github.com/PyPSA/technology-data. + * Add option to capture CO2 contained in biogas when upgrading (``sector: biogas_to_gas_cc``). * Merged option to extend geographical scope to Ukraine and Moldova. 
These From c7790d7c60f93bac41e2bac423848efa696f1f3d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:16:42 +0100 Subject: [PATCH 30/64] change default offshore turbine to NREL Reference 2020 ATB 5.5 MW --- config/config.default.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 2a3be87bf..a6c3c1d65 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -173,7 +173,7 @@ renewable: cutout: europe-2013-era5 resource: method: wind - turbine: NREL_ReferenceTurbine_5MW_offshore + turbine: NREL_ReferenceTurbine_2020ATB_5.5MW add_cutout_windspeed: true capacity_per_sqkm: 2 correction_factor: 0.8855 @@ -189,7 +189,7 @@ renewable: cutout: europe-2013-era5 resource: method: wind - turbine: NREL_ReferenceTurbine_5MW_offshore + turbine: NREL_ReferenceTurbine_2020ATB_5.5MW add_cutout_windspeed: true capacity_per_sqkm: 2 correction_factor: 0.8855 From 37df47110cee21c8dbf923462d7f60d2d414a7dd Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 13 Sep 2023 10:49:54 +0200 Subject: [PATCH 31/64] biomass_boiler: add pelletizing cost --- scripts/prepare_sector_network.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 9c1a85d70..ae5d12dfa 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2333,6 +2333,7 @@ def add_biomass(n, costs): efficiency=costs.at["biomass boiler", "efficiency"], capital_cost=costs.at["biomass boiler", "efficiency"] * costs.at["biomass boiler", "fixed"], + marginal_cost=costs.at["biomass boiler", "pelletizing cost"], lifetime=costs.at["biomass boiler", "lifetime"], ) From 4988e77be5e2ae1ba9089deca3dba86a47925c62 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:33:02 +0100 Subject: [PATCH 32/64] add release note --- doc/release_notes.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index b7035974d..782ebdeee 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -46,6 +46,8 @@ Upcoming Release * Add VOM as marginal cost to PtX processes. +* Add pelletizing costs for biomass boilers. + * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. * Add option to capture CO2 contained in biogas when upgrading (``sector: biogas_to_gas_cc``). 
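The capture variant referenced in this release note follows the usual multi-link pattern in ``prepare_sector_network.py``: besides the raw biogas input (bus0) and the upgraded gas output (bus1), extra buses carry the CO2 bookkeeping, with a withdrawal from ``co2 atmosphere`` crediting the biogenic origin of the carbon and, once ``biogas_upgrading_cc`` is enabled, the CO2 separated during upgrading additionally routed to ``co2 stored``. As a rough, self-contained illustration of how such per-bus efficiencies translate into bus flows (a toy helper and invented numbers, not PyPSA code or the technology-data values)::

    def multilink_flows(p0, efficiencies):
        """Flows a PyPSA-style multi-link delivers to its output buses.

        p0 is the power drawn from bus0 (here: MWh_LHV of raw biogas);
        a negative efficiency means the link withdraws from that bus.
        """
        return {bus: p0 * eff for bus, eff in efficiencies.items()}

    # invented figures for illustration only
    flows = multilink_flows(
        p0=1.0,  # 1 MWh_LHV of raw biogas
        efficiencies={
            "gas": 1.0,              # bus1: upgraded gas delivered
            "co2 atmosphere": -0.2,  # bus2: atmospheric CO2 credit (t per MWh biogas)
            "co2 stored": 0.2,       # bus3: captured CO2 with biogas_upgrading_cc (t per MWh)
        },
    )
    print(flows)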
From 872c92d1c047e056d0604bebd43bcf16f855d2b2 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:45:02 +0100 Subject: [PATCH 33/64] extended waste heat from PtX, revised minimum part loads --- config/config.default.yaml | 10 +++++--- doc/release_notes.rst | 8 +++++++ scripts/prepare_sector_network.py | 38 +++++++++++++++++++++++++++++++ 3 files changed, 53 insertions(+), 3 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index 2a3be87bf..873498564 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -480,11 +480,15 @@ sector: - nearshore # within 50 km of sea # - offshore ammonia: false - min_part_load_fischer_tropsch: 0.9 - min_part_load_methanolisation: 0.5 + min_part_load_fischer_tropsch: 0.7 + min_part_load_methanolisation: 0.3 + min_part_load_methanation: 0.3 use_fischer_tropsch_waste_heat: true + use_haber_bosch_waste_heat: true + use_methanolisation_waste_heat: true + use_methanation_waste_heat: true use_fuel_cell_waste_heat: true - use_electrolysis_waste_heat: false + use_electrolysis_waste_heat: true electricity_distribution_grid: true electricity_distribution_grid_cost_factor: 1.0 electricity_grid_connection: true diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 634209c7f..5e2c1a6bc 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -44,6 +44,14 @@ Upcoming Release network has been moved from ``focus_weights:`` to ``clustering: focus_weights:``. Backwards compatibility to old config files is maintained. +* Extend options for waste usage from Haber-Bosch, methanolisation and methanation. + +* Use electrolysis waste heat by default. + +* Add new ``sector_opts`` wildcard option "nowasteheat" to disable all waste heat usage. + +* Set minimum part loads for PtX processes to 30% for methanolisation and methanation, and to 70% for Fischer-Tropsch synthesis. + * Add VOM as marginal cost to PtX processes. * The ``mock_snakemake`` function can now be used with a Snakefile from a different directory using the new ``root_dir`` argument. 
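Because bus0 of the Haber-Bosch link is the electricity bus, the waste-heat share has to be normalised to the electricity input, which is what the ``efficiency3`` expression in the following hunk does. Plugging in the ammonia figures from the config above (5.93 MWh_H2 and 0.2473 MWh_el per tNH3, 5.166 MWh_NH3 per tNH3) gives a feel for the magnitudes::

    # ammonia figures from config.default.yaml (industry section)
    MWh_H2_per_tNH3 = 5.93     # hydrogen input
    MWh_el_per_tNH3 = 0.2473   # electricity input
    MWh_NH3_per_tNH3 = 5.166   # energy content of the ammonia output

    total_energy_input = (MWh_H2_per_tNH3 + MWh_el_per_tNH3) / MWh_NH3_per_tNH3
    electricity_input = MWh_el_per_tNH3 / MWh_NH3_per_tNH3

    # DEA: ~15% of the total energy input is recoverable as heat,
    # expressed per unit of electricity drawn at bus0
    efficiency3 = 0.15 * total_energy_input / electricity_input

    print(f"{total_energy_input:.3f} MWh input per MWh_NH3")  # ~1.196
    print(f"{electricity_input:.4f} MWh_el per MWh_NH3")      # ~0.0479
    print(f"{efficiency3:.2f} MWh heat per MWh_el")           # ~3.75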
diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index e9d97ade0..d797e30a9 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1345,6 +1345,7 @@ def add_storage_and_grids(n, costs): bus2=spatial.co2.nodes, p_nom_extendable=True, carrier="Sabatier", + p_min_pu=options.get("min_part_load_methanation", 0), efficiency=costs.at["methanation", "efficiency"], efficiency2=-costs.at["methanation", "efficiency"] * costs.at["gas", "CO2 intensity"], @@ -2982,6 +2983,34 @@ def add_waste_heat(n): 0.95 - n.links.loc[urban_central + " Fischer-Tropsch", "efficiency"] ) + if options["use_methanation_waste_heat"]: + n.links.loc[urban_central + " Sabatier", "bus3"] = ( + urban_central + " urban central heat" + ) + n.links.loc[urban_central + " Sabatier", "efficiency3"] = ( + 0.95 - n.links.loc[urban_central + " Sabatier", "efficiency"] + ) + + # DEA quotes 15% of total input (11% of which are high-value heat) + if options["use_haber_bosch_waste_heat"]: + n.links.loc[urban_central + " Haber-Bosch", "bus3"] = ( + urban_central + " urban central heat" + ) + total_energy_input = (cf_industry["MWh_H2_per_tNH3_electrolysis"] + cf_industry["MWh_elec_per_tNH3_electrolysis"]) / cf_industry["MWh_NH3_per_tNH3"] + electricity_input = cf_industry["MWh_elec_per_tNH3_electrolysis"] / cf_industry["MWh_NH3_per_tNH3"] + n.links.loc[urban_central + " Haber-Bosch", "efficiency3"] = ( + 0.15 * total_energy_input / electricity_input + ) + + if options["use_methanolisation_waste_heat"]: + n.links.loc[urban_central + " methanolisation", "bus4"] = ( + urban_central + " urban central heat" + ) + n.links.loc[urban_central + " methanolisation", "efficiency4"] = ( + costs.at["methanolisation", "heat-output"] + / costs.at["methanolisation", "hydrogen-input"] + ) + # TODO integrate usable waste heat efficiency into technology-data from DEA if options.get("use_electrolysis_waste_heat", False): n.links.loc[urban_central + " H2 Electrolysis", "bus2"] = ( @@ -3426,6 +3455,15 @@ def set_temporal_aggregation(n, opts, solver_name): if "nodistrict" in opts: options["district_heating"]["progress"] = 0.0 + if "nowasteheat" in opts: + logger.info("Disabling waste heat.") + options["use_fischer_tropsch_waste_heat"] = False + options["use_methanolisation_waste_heat"] = False + options["use_haber_bosch_waste_heat"] = False + options["use_methanation_waste_heat"] = False + options["use_fuel_cell_waste_heat"] = False + options["use_electrolysis_waste_heat"] = False + if "T" in opts: add_land_transport(n, costs) From 777899f686b4641d9fc52c09b6c9db6a30be4e1d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:45:53 +0000 Subject: [PATCH 34/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index d797e30a9..c39ac9a0b 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2996,8 +2996,14 @@ def add_waste_heat(n): n.links.loc[urban_central + " Haber-Bosch", "bus3"] = ( urban_central + " urban central heat" ) - total_energy_input = (cf_industry["MWh_H2_per_tNH3_electrolysis"] + cf_industry["MWh_elec_per_tNH3_electrolysis"]) / cf_industry["MWh_NH3_per_tNH3"] - electricity_input = cf_industry["MWh_elec_per_tNH3_electrolysis"] / 
cf_industry["MWh_NH3_per_tNH3"] + total_energy_input = ( + cf_industry["MWh_H2_per_tNH3_electrolysis"] + + cf_industry["MWh_elec_per_tNH3_electrolysis"] + ) / cf_industry["MWh_NH3_per_tNH3"] + electricity_input = ( + cf_industry["MWh_elec_per_tNH3_electrolysis"] + / cf_industry["MWh_NH3_per_tNH3"] + ) n.links.loc[urban_central + " Haber-Bosch", "efficiency3"] = ( 0.15 * total_energy_input / electricity_input ) From 71b27b524ead6afe24a2e065198b299389c4fda6 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Tue, 2 Jan 2024 19:57:40 +0100 Subject: [PATCH 35/64] prevent failure if potential waste heat technologies not present --- scripts/prepare_sector_network.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index c39ac9a0b..aa3e65fd1 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2974,8 +2974,10 @@ def add_waste_heat(n): if not urban_central.empty: urban_central = urban_central.str[: -len(" urban central heat")] + link_carriers = n.links.carrier.unique() + # TODO what is the 0.95 and should it be a config option? - if options["use_fischer_tropsch_waste_heat"]: + if options["use_fischer_tropsch_waste_heat"] and "Fischer-Tropsch" in link_carriers: n.links.loc[urban_central + " Fischer-Tropsch", "bus3"] = ( urban_central + " urban central heat" ) @@ -2983,7 +2985,7 @@ def add_waste_heat(n): 0.95 - n.links.loc[urban_central + " Fischer-Tropsch", "efficiency"] ) - if options["use_methanation_waste_heat"]: + if options["use_methanation_waste_heat"] and "Sabatier" in link_carriers: n.links.loc[urban_central + " Sabatier", "bus3"] = ( urban_central + " urban central heat" ) @@ -2992,7 +2994,7 @@ def add_waste_heat(n): ) # DEA quotes 15% of total input (11% of which are high-value heat) - if options["use_haber_bosch_waste_heat"]: + if options["use_haber_bosch_waste_heat"] and "Haber-Bosch" in link_carriers: n.links.loc[urban_central + " Haber-Bosch", "bus3"] = ( urban_central + " urban central heat" ) @@ -3008,7 +3010,7 @@ def add_waste_heat(n): 0.15 * total_energy_input / electricity_input ) - if options["use_methanolisation_waste_heat"]: + if options["use_methanolisation_waste_heat"] and "methanolisation" in link_carriers: n.links.loc[urban_central + " methanolisation", "bus4"] = ( urban_central + " urban central heat" ) @@ -3018,7 +3020,7 @@ def add_waste_heat(n): ) # TODO integrate usable waste heat efficiency into technology-data from DEA - if options.get("use_electrolysis_waste_heat", False): + if options.get("use_electrolysis_waste_heat", False) and "H2 Electrolysis" in link_carriers: n.links.loc[urban_central + " H2 Electrolysis", "bus2"] = ( urban_central + " urban central heat" ) @@ -3026,7 +3028,7 @@ def add_waste_heat(n): 0.84 - n.links.loc[urban_central + " H2 Electrolysis", "efficiency"] ) - if options["use_fuel_cell_waste_heat"]: + if options["use_fuel_cell_waste_heat"] and "H2 Fuel Cell" in link_carriers: n.links.loc[urban_central + " H2 Fuel Cell", "bus2"] = ( urban_central + " urban central heat" ) From fb5b10780536f1c3f337f7cbba5da185876795ba Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 2 Jan 2024 18:59:36 +0000 Subject: [PATCH 36/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git 
a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index aa3e65fd1..4e7ef6c68 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2977,7 +2977,10 @@ def add_waste_heat(n): link_carriers = n.links.carrier.unique() # TODO what is the 0.95 and should it be a config option? - if options["use_fischer_tropsch_waste_heat"] and "Fischer-Tropsch" in link_carriers: + if ( + options["use_fischer_tropsch_waste_heat"] + and "Fischer-Tropsch" in link_carriers + ): n.links.loc[urban_central + " Fischer-Tropsch", "bus3"] = ( urban_central + " urban central heat" ) @@ -3010,7 +3013,10 @@ def add_waste_heat(n): 0.15 * total_energy_input / electricity_input ) - if options["use_methanolisation_waste_heat"] and "methanolisation" in link_carriers: + if ( + options["use_methanolisation_waste_heat"] + and "methanolisation" in link_carriers + ): n.links.loc[urban_central + " methanolisation", "bus4"] = ( urban_central + " urban central heat" ) @@ -3020,7 +3026,10 @@ def add_waste_heat(n): ) # TODO integrate usable waste heat efficiency into technology-data from DEA - if options.get("use_electrolysis_waste_heat", False) and "H2 Electrolysis" in link_carriers: + if ( + options.get("use_electrolysis_waste_heat", False) + and "H2 Electrolysis" in link_carriers + ): n.links.loc[urban_central + " H2 Electrolysis", "bus2"] = ( urban_central + " urban central heat" ) From fa03c61187a232c452714979515967910474f14b Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 31 Jul 2023 10:52:37 +0200 Subject: [PATCH 37/64] gas_input: switch production data from scigrid to gem --- rules/build_sector.smk | 3 +- scripts/build_gas_input_locations.py | 56 +++++++++++++++++++++------- 2 files changed, 44 insertions(+), 15 deletions(-) diff --git a/rules/build_sector.smk b/rules/build_sector.smk index dd49fc6f0..1e8c70ba0 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -85,12 +85,11 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]: rule build_gas_input_locations: input: - lng=HTTP.remote( + gem=HTTP.remote( "https://globalenergymonitor.org/wp-content/uploads/2023/07/Europe-Gas-Tracker-2023-03-v3.xlsx", keep_local=True, ), entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson", - production="data/gas_network/scigrid-gas/data/IGGIELGN_Productions.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_offshore=RESOURCES diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index a3b945abc..07707658d 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -23,11 +23,10 @@ def read_scigrid_gas(fn): return df -def build_gem_lng_data(lng_fn): - df = pd.read_excel(lng_fn[0], sheet_name="LNG terminals - data") +def build_gem_lng_data(fn): + df = pd.read_excel(fn[0], sheet_name="LNG terminals - data") df = df.set_index("ComboID") - remove_status = ["Cancelled"] remove_country = ["Cyprus", "Turkey"] remove_terminal = ["Puerto de la Luz LNG Terminal", "Gran Canaria LNG Terminal"] @@ -42,9 +41,43 @@ def build_gem_lng_data(lng_fn): return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326") -def build_gas_input_locations(lng_fn, entry_fn, prod_fn, countries): +def build_gem_prod_data(fn): + df = pd.read_excel(fn[0], sheet_name="Gas extraction - main") + df = df.set_index("GEM Unit ID") + + remove_country = ["Cyprus", "Türkiye"] + remove_fuel_type = ["oil"] + + df = df.query( + "Status != 'shut in' \ + & 'Fuel 
type' != 'oil' \ + & Country != @remove_country \ + & ~Latitude.isna() \ + & ~Longitude.isna()" + ).copy() + + p = pd.read_excel(fn[0], sheet_name="Gas extraction - production") + p = p.set_index("GEM Unit ID") + p = p[p["Fuel description"] == 'gas' ] + + capacities = pd.DataFrame(index=df.index) + for key in ["production", "production design capacity", "reserves"]: + cap = p.loc[p["Production/reserves"] == key, "Quantity (converted)"].groupby("GEM Unit ID").sum().reindex(df.index) + # assume capacity such that 3% of reserves can be extracted per year (25% quantile) + annualization_factor = 0.03 if key == "reserves" else 1. + capacities[key] = cap * annualization_factor + + df["mcm_per_year"] = capacities["production"] \ + .combine_first(capacities["production design capacity"]) \ + .combine_first(capacities["reserves"]) + + geometry = gpd.points_from_xy(df["Longitude"], df["Latitude"]) + return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326") + + +def build_gas_input_locations(gem_fn, entry_fn, countries): # LNG terminals - lng = build_gem_lng_data(lng_fn) + lng = build_gem_lng_data(gem_fn) # Entry points from outside the model scope entry = read_scigrid_gas(entry_fn) @@ -56,16 +89,14 @@ def build_gas_input_locations(lng_fn, entry_fn, prod_fn, countries): ] # production sites inside the model scope - prod = read_scigrid_gas(prod_fn) - prod = prod.loc[ - (prod.geometry.y > 35) & (prod.geometry.x < 30) & (prod.country_code != "DE") - ] + prod = build_gem_prod_data(gem_fn) mcm_per_day_to_mw = 437.5 # MCM/day to MWh/h + mcm_per_year_to_mw = 1.199 # MCM/year to MWh/h mtpa_to_mw = 1649.224 # mtpa to MWh/h lng["p_nom"] = lng["CapacityInMtpa"] * mtpa_to_mw entry["p_nom"] = entry["max_cap_from_to_M_m3_per_d"] * mcm_per_day_to_mw - prod["p_nom"] = prod["max_supply_M_m3_per_d"] * mcm_per_day_to_mw + prod["p_nom"] = prod["mcm_per_year"] * mcm_per_year_to_mw lng["type"] = "lng" entry["type"] = "pipeline" @@ -83,7 +114,7 @@ def build_gas_input_locations(lng_fn, entry_fn, prod_fn, countries): snakemake = mock_snakemake( "build_gas_input_locations", simpl="", - clusters="37", + clusters="128", ) logging.basicConfig(level=snakemake.config["logging"]["level"]) @@ -104,9 +135,8 @@ def build_gas_input_locations(lng_fn, entry_fn, prod_fn, countries): countries = regions.index.str[:2].unique().str.replace("GB", "UK") gas_input_locations = build_gas_input_locations( - snakemake.input.lng, + snakemake.input.gem, snakemake.input.entry, - snakemake.input.production, countries, ) From 7c058f1ed333d41703e62d3d406d0d61a803da7d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 31 Jul 2023 12:20:43 +0200 Subject: [PATCH 38/64] add locations, capacities and costs of existing gas storage --- rules/build_sector.smk | 1 + scripts/build_gas_input_locations.py | 24 ++++++++++++++++-------- scripts/prepare_sector_network.py | 17 ++++++++++++----- 3 files changed, 29 insertions(+), 13 deletions(-) diff --git a/rules/build_sector.smk b/rules/build_sector.smk index 1e8c70ba0..ab8ff4edf 100644 --- a/rules/build_sector.smk +++ b/rules/build_sector.smk @@ -90,6 +90,7 @@ if config["sector"]["gas_network"] or config["sector"]["H2_retrofit"]: keep_local=True, ), entry="data/gas_network/scigrid-gas/data/IGGIELGN_BorderPoints.geojson", + storage="data/gas_network/scigrid-gas/data/IGGIELGN_Storages.geojson", regions_onshore=RESOURCES + "regions_onshore_elec_s{simpl}_{clusters}.geojson", regions_offshore=RESOURCES diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index 
07707658d..ad4492022 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -75,7 +75,7 @@ def build_gem_prod_data(fn): return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326") -def build_gas_input_locations(gem_fn, entry_fn, countries): +def build_gas_input_locations(gem_fn, entry_fn, sto_fn, countries): # LNG terminals lng = build_gem_lng_data(gem_fn) @@ -88,23 +88,30 @@ def build_gas_input_locations(gem_fn, entry_fn, countries): | (entry.from_country == "NO") # malformed datapoint # entries from NO to GB ] + sto = read_scigrid_gas(sto_fn) + remove_country = ["RU", "UA", "TR", "BY"] + sto = sto.query("country_code != @remove_country") + # production sites inside the model scope prod = build_gem_prod_data(gem_fn) mcm_per_day_to_mw = 437.5 # MCM/day to MWh/h mcm_per_year_to_mw = 1.199 # MCM/year to MWh/h mtpa_to_mw = 1649.224 # mtpa to MWh/h - lng["p_nom"] = lng["CapacityInMtpa"] * mtpa_to_mw - entry["p_nom"] = entry["max_cap_from_to_M_m3_per_d"] * mcm_per_day_to_mw - prod["p_nom"] = prod["mcm_per_year"] * mcm_per_year_to_mw + mcm_to_gwh = 11.36 # MCM to GWh + lng["capacity"] = lng["CapacityInMtpa"] * mtpa_to_mw + entry["capacity"] = entry["max_cap_from_to_M_m3_per_d"] * mcm_per_day_to_mw + prod["capacity"] = prod["mcm_per_year"] * mcm_per_year_to_mw + sto["capacity"] = sto["max_cushionGas_M_m3"] * mcm_to_gwh lng["type"] = "lng" entry["type"] = "pipeline" prod["type"] = "production" + sto["type"] = "storage" - sel = ["geometry", "p_nom", "type"] + sel = ["geometry", "capacity", "type"] - return pd.concat([prod[sel], entry[sel], lng[sel]], ignore_index=True) + return pd.concat([prod[sel], entry[sel], lng[sel], sto[sel]], ignore_index=True) if __name__ == "__main__": @@ -137,6 +144,7 @@ def build_gas_input_locations(gem_fn, entry_fn, countries): gas_input_locations = build_gas_input_locations( snakemake.input.gem, snakemake.input.entry, + snakemake.input.storage, countries, ) @@ -147,8 +155,8 @@ def build_gas_input_locations(gem_fn, entry_fn, countries): gas_input_nodes.to_file(snakemake.output.gas_input_nodes, driver="GeoJSON") gas_input_nodes_s = ( - gas_input_nodes.groupby(["bus", "type"])["p_nom"].sum().unstack() + gas_input_nodes.groupby(["bus", "type"])["capacity"].sum().unstack() ) - gas_input_nodes_s.columns.name = "p_nom" + gas_input_nodes_s.columns.name = "capacity" gas_input_nodes_s.to_csv(snakemake.output.gas_input_nodes_simplified) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index e9d97ade0..9387d4b17 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -454,10 +454,11 @@ def add_carrier_buses(n, carrier, nodes=None): n.add("Carrier", carrier) unit = "MWh_LHV" if carrier == "gas" else "MWh_th" + # preliminary value for non-gas carriers to avoid zeros + capital_cost = costs.at["gas storage", "fixed"] if carrier == "gas" else 0.02 n.madd("Bus", nodes, location=location, carrier=carrier, unit=unit) - # capital cost could be corrected to e.g. 0.2 EUR/kWh * annuity and O&M n.madd( "Store", nodes + " Store", @@ -465,8 +466,7 @@ def add_carrier_buses(n, carrier, nodes=None): e_nom_extendable=True, e_cyclic=True, carrier=carrier, - capital_cost=0.2 - * costs.at[carrier, "discount rate"], # preliminary value to avoid zeros + capital_cost=capital_cost, ) n.madd( @@ -1162,7 +1162,7 @@ def add_storage_and_grids(n, costs): if options["gas_network"]: logger.info( - "Add natural gas infrastructure, incl. LNG terminals, production and entry-points." 
+ "Add natural gas infrastructure, incl. LNG terminals, production, storage and entry-points." ) if options["H2_retrofit"]: @@ -1207,10 +1207,17 @@ def add_storage_and_grids(n, costs): remove_i = n.generators[gas_i & internal_i].index n.generators.drop(remove_i, inplace=True) - p_nom = gas_input_nodes.sum(axis=1).rename(lambda x: x + " gas") + input_types = ["lng", "pipeline", "production"] + p_nom = gas_input_nodes[input_types].sum(axis=1).rename(lambda x: x + " gas") n.generators.loc[gas_i, "p_nom_extendable"] = False n.generators.loc[gas_i, "p_nom"] = p_nom + # add existing gas storage capacity + gas_i = n.stores.carrier == "gas" + e_nom = gas_input_nodes["storage"].rename(lambda x: x + " gas Store").reindex(n.stores.index).fillna(0.) * 1e3 # MWh_LHV + e_nom.clip(upper=e_nom.quantile(0.98), inplace=True) # limit extremely large storage + n.stores.loc[gas_i, "e_nom_min"] = e_nom + # add candidates for new gas pipelines to achieve full connectivity G = nx.Graph() From 252f6d2c15838dc17ded00271f4edc05b417bec8 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 08:13:01 +0100 Subject: [PATCH 39/64] pre-commit formatting --- .pre-commit-config.yaml | 2 +- scripts/build_gas_input_locations.py | 21 ++++++++++++++------- scripts/prepare_sector_network.py | 12 ++++++++++-- 3 files changed, 25 insertions(+), 10 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7b9009c30..78e70b579 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -50,7 +50,7 @@ repos: - id: blackdoc # Formatting with "black" coding style -- repo: https://github.com/psf/black +- repo: https://github.com/psf/black-pre-commit-mirror rev: 23.12.1 hooks: # Format Python files diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index ad4492022..2f967c75f 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -47,7 +47,7 @@ def build_gem_prod_data(fn): remove_country = ["Cyprus", "Türkiye"] remove_fuel_type = ["oil"] - + df = df.query( "Status != 'shut in' \ & 'Fuel type' != 'oil' \ @@ -58,18 +58,25 @@ def build_gem_prod_data(fn): p = pd.read_excel(fn[0], sheet_name="Gas extraction - production") p = p.set_index("GEM Unit ID") - p = p[p["Fuel description"] == 'gas' ] + p = p[p["Fuel description"] == "gas"] capacities = pd.DataFrame(index=df.index) for key in ["production", "production design capacity", "reserves"]: - cap = p.loc[p["Production/reserves"] == key, "Quantity (converted)"].groupby("GEM Unit ID").sum().reindex(df.index) + cap = ( + p.loc[p["Production/reserves"] == key, "Quantity (converted)"] + .groupby("GEM Unit ID") + .sum() + .reindex(df.index) + ) # assume capacity such that 3% of reserves can be extracted per year (25% quantile) - annualization_factor = 0.03 if key == "reserves" else 1. 
+ annualization_factor = 0.03 if key == "reserves" else 1.0 capacities[key] = cap * annualization_factor - df["mcm_per_year"] = capacities["production"] \ - .combine_first(capacities["production design capacity"]) \ + df["mcm_per_year"] = ( + capacities["production"] + .combine_first(capacities["production design capacity"]) .combine_first(capacities["reserves"]) + ) geometry = gpd.points_from_xy(df["Longitude"], df["Latitude"]) return gpd.GeoDataFrame(df, geometry=geometry, crs="EPSG:4326") @@ -88,7 +95,7 @@ def build_gas_input_locations(gem_fn, entry_fn, sto_fn, countries): | (entry.from_country == "NO") # malformed datapoint # entries from NO to GB ] - sto = read_scigrid_gas(sto_fn) + sto = read_scigrid_gas(sto_fn) remove_country = ["RU", "UA", "TR", "BY"] sto = sto.query("country_code != @remove_country") diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 9387d4b17..d5c979fa5 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1214,8 +1214,16 @@ def add_storage_and_grids(n, costs): # add existing gas storage capacity gas_i = n.stores.carrier == "gas" - e_nom = gas_input_nodes["storage"].rename(lambda x: x + " gas Store").reindex(n.stores.index).fillna(0.) * 1e3 # MWh_LHV - e_nom.clip(upper=e_nom.quantile(0.98), inplace=True) # limit extremely large storage + e_nom = ( + gas_input_nodes["storage"] + .rename(lambda x: x + " gas Store") + .reindex(n.stores.index) + .fillna(0.0) + * 1e3 + ) # MWh_LHV + e_nom.clip( + upper=e_nom.quantile(0.98), inplace=True + ) # limit extremely large storage n.stores.loc[gas_i, "e_nom_min"] = e_nom # add candidates for new gas pipelines to achieve full connectivity From 4983a2e02178dfe501358ce24636f877ecd4f478 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 08:19:52 +0100 Subject: [PATCH 40/64] add release note --- doc/release_notes.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 634209c7f..368237917 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -65,6 +65,9 @@ Upcoming Release * Validate downloads from Zenodo using MD5 checksums. This identifies corrupted or incomplete downloads. +* Add locations, capacities and costs of existing gas storage using Global + Energy Monitor's `Europe Gas Tracker + `_. 
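The new storage capacities, like the other gas inputs, are straightforward unit conversions from the GEM and SciGRID source data: Mtpa and MCM/day become MW, annual production (or 3 % of the reserves) in MCM/year becomes MW, and cushion-gas volumes in MCM become GWh and are later scaled to MWh_LHV when setting ``e_nom_min`` in ``prepare_sector_network.py``. A quick sanity check with made-up site sizes::

    # conversion factors from build_gas_input_locations.py
    mcm_per_day_to_mw = 437.5    # MCM/day  -> MWh/h
    mcm_per_year_to_mw = 1.199   # MCM/year -> MWh/h
    mtpa_to_mw = 1649.224        # Mtpa     -> MWh/h
    mcm_to_gwh = 11.36           # MCM      -> GWh

    # made-up example sites
    lng_mtpa = 10.0            # LNG terminal
    entry_mcm_per_day = 20.0   # pipeline entry point
    reserves_mcm = 50_000.0    # gas field reporting only reserves
    storage_mcm = 100.0        # cushion gas of a storage site

    print(lng_mtpa * mtpa_to_mw)                     # 16492.24 MW
    print(entry_mcm_per_day * mcm_per_day_to_mw)     # 8750.0 MW
    print(0.03 * reserves_mcm * mcm_per_year_to_mw)  # 1798.5 MW (3% of reserves per year)
    print(storage_mcm * mcm_to_gwh * 1e3)            # 1136000.0 MWh_LHV towards e_nom_min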
**Bugs and Compatibility** From 19b503d7580faf75dba539923d255c37f4038fd7 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Fri, 11 Aug 2023 12:07:03 +0200 Subject: [PATCH 41/64] retrieve.smk: add scigrid storages to files of interest --- rules/retrieve.smk | 1 + 1 file changed, 1 insertion(+) diff --git a/rules/retrieve.smk b/rules/retrieve.smk index 4c9ca8149..4ded2a46b 100644 --- a/rules/retrieve.smk +++ b/rules/retrieve.smk @@ -169,6 +169,7 @@ if config["enable"]["retrieve"] and ( "IGGIELGN_LNGs.geojson", "IGGIELGN_BorderPoints.geojson", "IGGIELGN_Productions.geojson", + "IGGIELGN_Storages.geojson", "IGGIELGN_PipeSegments.geojson", ] From 0d03d384cc0ce27e681b76d14418b6d1b5cf9d1c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:07:08 +0100 Subject: [PATCH 42/64] lossy_bidirectional_links: use original length for loss calculation --- scripts/prepare_sector_network.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 998f954e8..09de541aa 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3309,15 +3309,16 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) ) - rev_links.capital_cost = 0 - rev_links.length = 0 + rev_links["capital_cost"] = 0 + rev_links["length"] = 0 rev_links["reversed"] = True rev_links.index = rev_links.index.map(lambda x: x + "-reversed") n.links = pd.concat([n.links, rev_links], sort=False) n.links["reversed"] = n.links["reversed"].fillna(False) + n.links["length_original"] = n.links["length_original"].fillna(n.links.length) # do compression losses after concatenation to take electricity consumption at bus0 in either direction carrier_i = n.links.query("carrier == @carrier").index @@ -3326,7 +3327,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.buses.location ) # electricity n.links.loc[carrier_i, "efficiency2"] = ( - -compression_per_1000km * n.links.loc[carrier_i, "length"] / 1e3 + -compression_per_1000km * n.links.loc[carrier_i, "length_original"] / 1e3 ) From 2b2bad392f6c83771472d93ca2df597608ea6b26 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 08:08:21 +0000 Subject: [PATCH 43/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 09de541aa..bab8de7bd 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3309,7 +3309,9 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) + n.links.loc[carrier_i] + .copy() + .rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) ) rev_links["capital_cost"] = 0 rev_links["length"] = 0 From 075ffb5c043edf16b1a9b69c4be3ed31da7919b4 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:26:08 +0100 Subject: [PATCH 44/64] add release notes and 
documentation --- doc/release_notes.rst | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 505c747e4..82f632520 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -10,6 +10,13 @@ Release Notes Upcoming Release ================ +* Add option to specify losses for bidirectional links, e.g. pipelines or HVDC + links, in configuration file under ``sector: transmission_efficiency:``. Users + can specify static or length-dependent values as well as a length-dependent + electricity demand for compression, which is implemented as a multi-link to + the local electricity buses. The bidirectional links will then be split into + two unidirectional links with linked capacities. + * Updated Global Energy Monitor LNG terminal data to March 2023 version. * For industry distribution, use EPRTR as fallback if ETS data is not available. From d829d6fd3da28cc7103648132b07726deda1b9c8 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 09:28:24 +0100 Subject: [PATCH 45/64] add release notes and documentation --- doc/configtables/sector.csv | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index d610c8626..2767c6036 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -107,6 +107,11 @@ electricity_distribution _grid,--,"{true, false}",Add a simplified representatio electricity_distribution _grid_cost_factor,,,Multiplies the investment cost of the electricity distribution grid ,,, electricity_grid _connection,--,"{true, false}",Add the cost of electricity grid connection for onshore wind and solar +transmission_efficiency,,,Section to specify transmission losses or compression energy demands of bidirectional links. Splits them into two capacity-linked unidirectional links. +-- {carrier},--,str,The carrier of the link. +-- -- efficiency_static,p.u.,float,Length-independent transmission efficiency. +-- -- efficiency_per_1000km,p.u. per 1000 km,float,Length-dependent transmission efficiency ($\eta^{\text{length}}$) +-- -- compression_per_1000km,p.u. per 1000 km,float,Length-dependent electricity demand for compression ($\eta \cdot \text{length}$) implemented as multi-link to local electricity bus. H2_network,--,"{true, false}",Add option for new hydrogen pipelines gas_network,--,"{true, false}","Add existing natural gas infrastructure, incl. LNG terminals, production and entry-points. The existing gas network is added with a lossless transport model. A length-weighted `k-edge augmentation algorithm `_ can be run to add new candidate gas pipelines such that all regions of the model can be connected to the gas network. When activated, all the gas demands are regionally disaggregated as well." H2_retrofit,--,"{true, false}",Add option for retrofiting existing pipelines to transport hydrogen. 
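For the compression entry documented above, ``lossy_bidirectional_links`` attaches the local electricity bus as an additional ``bus2`` of each split link and sets a negative ``efficiency2`` proportional to the original pipeline length, so that electricity is drawn at bus0 in either flow direction. A rough worked example with an invented configuration value::

    length_km = 1500.0             # original pipeline length, kept as "length_original"
    compression_per_1000km = 0.01  # invented value, for illustration only

    efficiency2 = -compression_per_1000km * length_km / 1e3
    print(efficiency2)  # -0.015, i.e. 0.015 MWh_el per MWh sent through the pipeline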
From 9d939fa635f8a0b55f7049dd23a29facfeda1471 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:12:43 +0100 Subject: [PATCH 46/64] remove helmeth option --- config/config.default.yaml | 2 -- doc/configtables/sector.csv | 1 - doc/release_notes.rst | 2 ++ scripts/plot_network.py | 4 ++-- scripts/plot_summary.py | 1 - scripts/prepare_sector_network.py | 17 ----------------- 6 files changed, 4 insertions(+), 23 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index d7704a276..e8ca22dc9 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -451,7 +451,6 @@ sector: solar_cf_correction: 0.788457 # = >>> 1/1.2683 marginal_cost_storage: 0. #1e-4 methanation: true - helmeth: false coal_cc: false dac: true co2_vent: false @@ -954,7 +953,6 @@ plotting: Sabatier: '#9850ad' methanation: '#c44ce6' methane: '#c44ce6' - helmeth: '#e899ff' # synfuels Fischer-Tropsch: '#25c49a' liquid: '#25c49a' diff --git a/doc/configtables/sector.csv b/doc/configtables/sector.csv index 280c19064..57e6ce3dd 100644 --- a/doc/configtables/sector.csv +++ b/doc/configtables/sector.csv @@ -71,7 +71,6 @@ solar_thermal,--,"{true, false}",Add option for using solar thermal to generate solar_cf_correction,--,float,The correction factor for the value provided by the solar thermal profile calculations marginal_cost_storage,currency/MWh ,float,The marginal cost of discharging batteries in distributed grids methanation,--,"{true, false}",Add option for transforming hydrogen and CO2 into methane using methanation. -helmeth,--,"{true, false}",Add option for transforming power into gas using HELMETH (Integrated High-Temperature ELectrolysis and METHanation for Effective Power to Gas Conversion) coal_cc,--,"{true, false}",Add option for coal CHPs with carbon capture dac,--,"{true, false}",Add option for Direct Air Capture (DAC) co2_vent,--,"{true, false}",Add option for vent out CO2 from storages to the atmosphere. diff --git a/doc/release_notes.rst b/doc/release_notes.rst index f84c0f834..5ac7925e1 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -79,6 +79,8 @@ Upcoming Release Energy Monitor's `Europe Gas Tracker `_. +* Remove HELMETH option. + **Bugs and Compatibility** * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. 
(https://github.com/PyPSA/pypsa-eur/pull/732) diff --git a/scripts/plot_network.py b/scripts/plot_network.py index f44bb6de3..674811208 100644 --- a/scripts/plot_network.py +++ b/scripts/plot_network.py @@ -31,7 +31,7 @@ def rename_techs_tyndp(tech): tech = rename_techs(tech) if "heat pump" in tech or "resistive heater" in tech: return "power-to-heat" - elif tech in ["H2 Electrolysis", "methanation", "helmeth", "H2 liquefaction"]: + elif tech in ["H2 Electrolysis", "methanation", "H2 liquefaction"]: return "power-to-gas" elif tech == "H2": return "H2 storage" @@ -495,7 +495,7 @@ def plot_ch4_map(network): # make a fake MultiIndex so that area is correct for legend fossil_gas.index = pd.MultiIndex.from_product([fossil_gas.index, ["fossil gas"]]) - methanation_i = n.links[n.links.carrier.isin(["helmeth", "Sabatier"])].index + methanation_i = n.links.query("carrier == 'Sabatier'").index methanation = ( abs( n.links_t.p1.loc[:, methanation_i].mul( diff --git a/scripts/plot_summary.py b/scripts/plot_summary.py index 5804e7854..67ac9b553 100644 --- a/scripts/plot_summary.py +++ b/scripts/plot_summary.py @@ -121,7 +121,6 @@ def rename_techs(label): "gas boiler", "gas", "natural gas", - "helmeth", "methanation", "ammonia", "hydrogen storage", diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index c4a67a388..f746fe9c0 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -1369,23 +1369,6 @@ def add_storage_and_grids(n, costs): lifetime=costs.at["methanation", "lifetime"], ) - if options["helmeth"]: - n.madd( - "Link", - spatial.nodes, - suffix=" helmeth", - bus0=nodes, - bus1=spatial.gas.nodes, - bus2=spatial.co2.nodes, - carrier="helmeth", - p_nom_extendable=True, - efficiency=costs.at["helmeth", "efficiency"], - efficiency2=-costs.at["helmeth", "efficiency"] - * costs.at["gas", "CO2 intensity"], - capital_cost=costs.at["helmeth", "fixed"], - lifetime=costs.at["helmeth", "lifetime"], - ) - if options.get("coal_cc"): n.madd( "Link", From 92df7bbb9c786667364f7358f5ee90caad87ec1d Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:27:42 +0100 Subject: [PATCH 47/64] build_renewable_profiles: improve logging of time passed --- scripts/build_renewable_profiles.py | 29 +++++++++++++++++++++-------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 3a1c525e0..ef8683cb9 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -277,15 +277,14 @@ snakemake.input.country_shapes, buffer=buffer, invert=True ) + logger.info("Calculate landuse availability...") + start = time.time() + kwargs = dict(nprocesses=nprocesses, disable_progressbar=noprogress) - if noprogress: - logger.info("Calculate landuse availabilities...") - start = time.time() - availability = cutout.availabilitymatrix(regions, excluder, **kwargs) - duration = time.time() - start - logger.info(f"Completed availability calculation ({duration:2.2f}s)") - else: - availability = cutout.availabilitymatrix(regions, excluder, **kwargs) + availability = cutout.availabilitymatrix(regions, excluder, **kwargs) + + duration = time.time() - start + logger.info(f"Completed landuse availability calculation ({duration:2.2f}s)") # For Moldova and Ukraine: Overwrite parts not covered by Corine with # externally determined available areas @@ -304,8 +303,19 @@ func = getattr(cutout, resource.pop("method")) if client is not None: resource["dask_kwargs"] 
= {"scheduler": client} + + logger.info("Calculate average capacity factor...") + start = time.time() + capacity_factor = correction_factor * func(capacity_factor=True, **resource) layout = capacity_factor * area * capacity_per_sqkm + + duration = time.time() - start + logger.info(f"Completed average capacity factor calculation ({duration:2.2f}s)") + + logger.info("Calculate weighted capacity factor time series...") + start = time.time() + profile, capacities = func( matrix=availability.stack(spatial=["y", "x"]), layout=layout, @@ -315,6 +325,9 @@ **resource, ) + duration = time.time() - start + logger.info(f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)") + logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") if p_nom_max_meth == "simple": p_nom_max = capacity_per_sqkm * availability @ area From fdb63bc6ca4c3aa332104d26bca1c0a5d5c546c1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:29:08 +0000 Subject: [PATCH 48/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_renewable_profiles.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index ef8683cb9..83c79482b 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -326,7 +326,9 @@ ) duration = time.time() - start - logger.info(f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)") + logger.info( + f"Completed weighted capacity factor time series calculation ({duration:2.2f}s)" + ) logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") if p_nom_max_meth == "simple": From 6b344c9901f7aa78d8714ad00cb9626b2773cb37 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:33:33 +0100 Subject: [PATCH 49/64] renewable_profiles: remove conservative potential estimation method --- config/config.default.yaml | 4 -- doc/configtables/offwind-ac.csv | 1 - doc/configtables/offwind-dc.csv | 1 - doc/configtables/onwind.csv | 1 - doc/configtables/solar.csv | 1 - scripts/build_renewable_profiles.py | 98 ++++++++++++----------------- 6 files changed, 40 insertions(+), 66 deletions(-) diff --git a/config/config.default.yaml b/config/config.default.yaml index d7704a276..dc818e84c 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -167,7 +167,6 @@ renewable: distance_grid_codes: [1, 2, 3, 4, 5, 6] natura: true excluder_resolution: 100 - potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-ac: cutout: europe-2013-era5 @@ -183,7 +182,6 @@ renewable: max_depth: 50 max_shore_distance: 30000 excluder_resolution: 200 - potential: simple # or conservative clip_p_max_pu: 1.e-2 offwind-dc: cutout: europe-2013-era5 @@ -199,7 +197,6 @@ renewable: max_depth: 50 min_shore_distance: 30000 excluder_resolution: 200 - potential: simple # or conservative clip_p_max_pu: 1.e-2 solar: cutout: europe-2013-sarah @@ -214,7 +211,6 @@ renewable: corine: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 26, 31, 32] natura: true excluder_resolution: 100 - potential: simple # or conservative clip_p_max_pu: 1.e-2 hydro: cutout: europe-2013-era5 diff --git a/doc/configtables/offwind-ac.csv b/doc/configtables/offwind-ac.csv index 6b756799c..c3512a9e7 100644 --- a/doc/configtables/offwind-ac.csv +++ 
b/doc/configtables/offwind-ac.csv @@ -12,5 +12,4 @@ ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential." max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be build. Such areas close to the shore are excluded in the process of calculating the AC-connected offshore wind potential." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." diff --git a/doc/configtables/offwind-dc.csv b/doc/configtables/offwind-dc.csv index 1f72228aa..350955973 100644 --- a/doc/configtables/offwind-dc.csv +++ b/doc/configtables/offwind-dc.csv @@ -12,5 +12,4 @@ ship_threshold,--,float,"Ship density threshold from which areas are excluded." max_depth,m,float,"Maximum sea water depth at which wind turbines can be build. Maritime areas with deeper waters are excluded in the process of calculating the AC-connected offshore wind potential." min_shore_distance,m,float,"Minimum distance to the shore below which wind turbines cannot be build." max_shore_distance,m,float,"Maximum distance to the shore above which wind turbines cannot be build." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." diff --git a/doc/configtables/onwind.csv b/doc/configtables/onwind.csv index ba9482e56..b7e823b36 100644 --- a/doc/configtables/onwind.csv +++ b/doc/configtables/onwind.csv @@ -9,7 +9,6 @@ corine,,, -- distance,m,float,"Distance to keep from areas specified in ``distance_grid_codes``" -- distance_grid_codes,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes to which wind turbines must maintain a distance specified in the setting ``distance``." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." correction_factor,--,float,"Correction factor for capacity factor time series." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." diff --git a/doc/configtables/solar.csv b/doc/configtables/solar.csv index 803445d5d..7da1281bc 100644 --- a/doc/configtables/solar.csv +++ b/doc/configtables/solar.csv @@ -10,6 +10,5 @@ capacity_per_sqkm,:math:`MW/km^2`,float,"Allowable density of solar panel placem correction_factor,--,float,"A correction factor for the capacity factor (availability) time series." 
corine,--,"Any subset of the `CORINE Land Cover code list `_","Specifies areas according to CORINE Land Cover codes which are generally eligible for solar panel placement." natura,bool,"{true, false}","Switch to exclude `Natura 2000 `_ natural protection areas. Area is excluded if ``true``." -potential,--,"One of {'simple', 'conservative'}","Method to compute the maximal installable potential for a node; confer :ref:`renewableprofiles`" clip_p_max_pu,p.u.,float,"To avoid too small values in the renewables` per-unit availability time series values below this threshold are set to zero." excluder_resolution,m,float,"Resolution on which to perform geographical elibility analysis." diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index 3a1c525e0..c33bdf9bb 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected -offshore wind, DC-connected offshore wind and solar PV generators. In addition -for offshore wind it calculates the fraction of the grid connection which is -under water. +(iii) the average distance from the node for onshore wind, AC-connected offshore +wind, DC-connected offshore wind and solar PV generators. In addition for +offshore wind it calculates the fraction of the grid connection which is under +water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. @@ -26,20 +26,9 @@ renewable: {technology}: - cutout: - corine: - grid_codes: - distance: - natura: - max_depth: - max_shore_distance: - min_shore_distance: - capacity_per_sqkm: - correction_factor: - potential: - min_p_max_pu: - clip_p_max_pu: - resource: + cutout: corine: grid_codes: distance: natura: max_depth: + max_shore_distance: min_shore_distance: capacity_per_sqkm: + correction_factor: min_p_max_pu: clip_p_max_pu: resource: .. seealso:: Documentation of the configuration file ``config/config.yaml`` at @@ -48,21 +37,30 @@ Inputs ------ -- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) `_ inventory on `44 classes `_ of land use (e.g. forests, arable land, industrial, urban areas). +- ``data/bundle/corine/g250_clc06_V18_5.tif``: `CORINE Land Cover (CLC) + `_ inventory on `44 + classes `_ of + land use (e.g. forests, arable land, industrial, urban areas). .. image:: img/corine.png :scale: 33 % -- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric `_ data set with a global terrain model for ocean and land at 15 arc-second intervals by the `General Bathymetric Chart of the Oceans (GEBCO) `_. +- ``data/bundle/GEBCO_2014_2D.nc``: A `bathymetric + `_ data set with a global terrain + model for ocean and land at 15 arc-second intervals by the `General + Bathymetric Chart of the Oceans (GEBCO) + `_. .. 
image:: img/gebco_2019_grid_image.jpg :scale: 50 % - **Source:** `GEBCO `_ + **Source:** `GEBCO + `_ - ``resources/natura.tiff``: confer :ref:`natura` - ``resources/offshore_shapes.geojson``: confer :ref:`shapes` -- ``resources/regions_onshore.geojson``: (if not offshore wind), confer :ref:`busregions` +- ``resources/regions_onshore.geojson``: (if not offshore wind), confer + :ref:`busregions` - ``resources/regions_offshore.geojson``: (if offshore wind), :ref:`busregions` - ``"cutouts/" + params["renewable"][{technology}]['cutout']``: :ref:`cutout` - ``networks/base.nc``: :ref:`base` @@ -128,25 +126,25 @@ This script functions at two main spatial resolutions: the resolution of the network nodes and their `Voronoi cells `_, and the resolution of the -cutout grid cells for the weather data. Typically the weather data grid is -finer than the network nodes, so we have to work out the distribution of -generators across the grid cells within each Voronoi cell. This is done by -taking account of a combination of the available land at each grid cell and the -capacity factor there. +cutout grid cells for the weather data. Typically the weather data grid is finer +than the network nodes, so we have to work out the distribution of generators +across the grid cells within each Voronoi cell. This is done by taking account +of a combination of the available land at each grid cell and the capacity factor +there. First the script computes how much of the technology can be installed at each cutout grid cell and each node using the `GLAES -`_ library. This uses the CORINE land use data, -Natura2000 nature reserves and GEBCO bathymetry data. +`_ library. This uses the CORINE land use +data, Natura2000 nature reserves and GEBCO bathymetry data. .. image:: img/eligibility.png :scale: 50 % :align: center -To compute the layout of generators in each node's Voronoi cell, the -installable potential in each grid cell is multiplied with the capacity factor -at each grid cell. This is done since we assume more generators are installed -at cells with a higher capacity factor. +To compute the layout of generators in each node's Voronoi cell, the installable +potential in each grid cell is multiplied with the capacity factor at each grid +cell. This is done since we assume more generators are installed at cells with a +higher capacity factor. .. image:: img/offwinddc-gridcell.png :scale: 50 % @@ -164,20 +162,14 @@ :scale: 50 % :align: center -This layout is then used to compute the generation availability time series -from the weather data cutout from ``atlite``. +This layout is then used to compute the generation availability time series from +the weather data cutout from ``atlite``. -Two methods are available to compute the maximal installable potential for the -node (`p_nom_max`): ``simple`` and ``conservative``: - -- ``simple`` adds up the installable potentials of the individual grid cells. - If the model comes close to this limit, then the time series may slightly - overestimate production since it is assumed the geographical distribution is - proportional to capacity factor. - -- ``conservative`` assertains the nodal limit by increasing capacities - proportional to the layout until the limit of an individual grid cell is - reached. +The maximal installable potential for the node (`p_nom_max`) is computed by +adding up the installable potentials of the individual grid cells. 
+If the model comes close to this limit, then the time series may slightly +overestimate production since it is assumed the geographical distribution is +proportional to capacity factor. """ import functools import logging @@ -210,7 +202,6 @@ resource = params["resource"] # pv panel params / wind turbine params correction_factor = params.get("correction_factor", 1.0) capacity_per_sqkm = params["capacity_per_sqkm"] - p_nom_max_meth = params.get("potential", "conservative") if isinstance(params.get("corine", {}), list): params["corine"] = {"grid_codes": params["corine"]} @@ -315,17 +306,8 @@ **resource, ) - logger.info(f"Calculating maximal capacity per bus (method '{p_nom_max_meth}')") - if p_nom_max_meth == "simple": - p_nom_max = capacity_per_sqkm * availability @ area - elif p_nom_max_meth == "conservative": - max_cap_factor = capacity_factor.where(availability != 0).max(["x", "y"]) - p_nom_max = capacities / max_cap_factor - else: - raise AssertionError( - 'Config key `potential` should be one of "simple" ' - f'(default) or "conservative", not "{p_nom_max_meth}"' - ) + logger.info("Calculating maximal capacity per bus") + p_nom_max = capacity_per_sqkm * availability @ area logger.info("Calculate average distances.") layoutmatrix = (layout * availability).stack(spatial=["y", "x"]) From 38d587944b8625cfb208f6cc0c5046b1a3ee97d6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 09:35:05 +0000 Subject: [PATCH 50/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_renewable_profiles.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/build_renewable_profiles.py b/scripts/build_renewable_profiles.py index c33bdf9bb..0ad840ba8 100644 --- a/scripts/build_renewable_profiles.py +++ b/scripts/build_renewable_profiles.py @@ -7,10 +7,10 @@ """ Calculates for each network node the (i) installable capacity (based on land- use), (ii) the available generation time series (based on weather data), and -(iii) the average distance from the node for onshore wind, AC-connected offshore -wind, DC-connected offshore wind and solar PV generators. In addition for -offshore wind it calculates the fraction of the grid connection which is under -water. +(iii) the average distance from the node for onshore wind, AC-connected +offshore wind, DC-connected offshore wind and solar PV generators. In addition +for offshore wind it calculates the fraction of the grid connection which is +under water. .. note:: Hydroelectric profiles are built in script :mod:`build_hydro_profiles`. 
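The patch above (and its pre-commit follow-up) reduces the nodal potential estimate to the single expression ``p_nom_max = capacity_per_sqkm * availability @ area``. As a rough illustration of that matrix product, here is a minimal standalone sketch with made-up shapes and numbers; it uses plain NumPy instead of the xarray objects handled in ``build_renewable_profiles.py``, and only the formula itself is taken from the patch.

import numpy as np

# Toy example of the retained "simple" potential estimate:
# p_nom_max per bus = capacity density * availability-weighted eligible area.
capacity_per_sqkm = 3.0  # MW/km^2, assumed value for illustration only
availability = np.array(
    [
        [0.2, 0.0, 0.5],  # share of each weather grid cell usable by bus 0
        [0.0, 0.7, 0.1],  # share of each weather grid cell usable by bus 1
    ]
)
area = np.array([100.0, 80.0, 120.0])  # km^2 per weather grid cell

p_nom_max = capacity_per_sqkm * availability @ area  # MW per bus
print(p_nom_max)  # [240. 204.]

Summing the cell potentials in this way corresponds to the former ``simple`` method; the removed ``conservative`` variant instead divided the layout capacities by the maximum capacity factor among the eligible cells.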
From e423945e7d709d1cb59d85caab4b306bd752c045 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 10:54:53 +0100 Subject: [PATCH 51/64] gas_input: ensure all columns exist even if column empty --- scripts/build_gas_input_locations.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/scripts/build_gas_input_locations.py b/scripts/build_gas_input_locations.py index 2f967c75f..9ad3760d9 100644 --- a/scripts/build_gas_input_locations.py +++ b/scripts/build_gas_input_locations.py @@ -161,8 +161,12 @@ def build_gas_input_locations(gem_fn, entry_fn, sto_fn, countries): gas_input_nodes.to_file(snakemake.output.gas_input_nodes, driver="GeoJSON") + ensure_columns = ["lng", "pipeline", "production", "storage"] gas_input_nodes_s = ( - gas_input_nodes.groupby(["bus", "type"])["capacity"].sum().unstack() + gas_input_nodes.groupby(["bus", "type"])["capacity"] + .sum() + .unstack() + .reindex(columns=ensure_columns) ) gas_input_nodes_s.columns.name = "capacity" From 29afffb4ca1b8480d88580769960b9536c17ef26 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 11:31:56 +0100 Subject: [PATCH 52/64] fix potential duplicate renaming of length_original --- scripts/prepare_sector_network.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 2ba64e877..54d5d7c81 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3441,8 +3441,9 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): rev_links = ( n.links.loc[carrier_i] .copy() - .rename({"bus0": "bus1", "bus1": "bus0", "length": "length_original"}, axis=1) + .rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) + rev_links["length_original"] = rev_links["length"] rev_links["capital_cost"] = 0 rev_links["length"] = 0 rev_links["reversed"] = True From 4606cb131b292c02e95b2af3583e7df48561fcb9 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 10:44:14 +0000 Subject: [PATCH 53/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/prepare_sector_network.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index 54d5d7c81..815bf6ffd 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -3439,9 +3439,7 @@ def lossy_bidirectional_links(n, carrier, efficiencies={}): n.links.loc[carrier_i, "length"] / 1e3 ) rev_links = ( - n.links.loc[carrier_i] - .copy() - .rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) + n.links.loc[carrier_i].copy().rename({"bus0": "bus1", "bus1": "bus0"}, axis=1) ) rev_links["length_original"] = rev_links["length"] rev_links["capital_cost"] = 0 From 05495ce48413d2aee4c351da29b230cd62add824 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 12:46:42 +0100 Subject: [PATCH 54/64] fix lossy bidirectional link coupling constraint for myopic --- scripts/solve_network.py | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index a21258952..0bfc68ffe 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -691,13 +691,24 @@ def add_lossy_bidirectional_link_constraints(n): if not n.links.p_nom_extendable.any() or not "reversed" in n.links.columns: return - reversed_links = 
n.links.reversed.fillna(0).astype(bool) - carriers = n.links.loc[reversed_links, "carrier"].unique() + n.links["reversed"] = n.links.reversed.fillna(0).astype(bool) + carriers = n.links.loc[n.links.reversed, "carrier"].unique() forward_i = n.links.query( "carrier in @carriers and ~reversed and p_nom_extendable" ).index - backward_i = forward_i + "-reversed" + + def get_backward_i(forward_i): + return pd.Index( + [ + re.sub(r"-(\d{4})$", r"-reversed-\1", s) + if re.search(r"-\d{4}$", s) + else s + "-reversed" + for s in forward_i + ] + ) + + backward_i = get_backward_i(forward_i) lhs = n.model["Link-p_nom"].loc[backward_i] rhs = n.model["Link-p_nom"].loc[forward_i] From 80f9259bac4742b0f819ddc6542da458a7690874 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 12:57:22 +0100 Subject: [PATCH 55/64] handle gas pipeline retrofitting with lossy links --- scripts/solve_network.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index 0bfc68ffe..98afd49d5 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -774,9 +774,13 @@ def add_pipe_retrofit_constraint(n): """ Add constraint for retrofitting existing CH4 pipelines to H2 pipelines. """ - gas_pipes_i = n.links.query("carrier == 'gas pipeline' and p_nom_extendable").index + if "reversed" not in n.links.columns: + n.links["reversed"] = False + gas_pipes_i = n.links.query( + "carrier == 'gas pipeline' and p_nom_extendable and ~reversed" + ).index h2_retrofitted_i = n.links.query( - "carrier == 'H2 pipeline retrofitted' and p_nom_extendable" + "carrier == 'H2 pipeline retrofitted' and p_nom_extendable and ~reversed" ).index if h2_retrofitted_i.empty or gas_pipes_i.empty: From a3cfc8cde51a87ed2fe0babdd4f5c5b42cf993be Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:05:46 +0100 Subject: [PATCH 56/64] add heat vent to tech_colors --- config/config.default.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/config/config.default.yaml b/config/config.default.yaml index d7704a276..b9fb76f47 100644 --- a/config/config.default.yaml +++ b/config/config.default.yaml @@ -885,6 +885,7 @@ plotting: # heat demand Heat load: '#cc1f1f' heat: '#cc1f1f' + heat vent: '#aa3344' heat demand: '#cc1f1f' rural heat: '#ff5c5c' residential rural heat: '#ff7c7c' From bcafbb1e5459ac90eb3fbb65f9b3da22149a2f7a Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:15:43 +0100 Subject: [PATCH 57/64] compatibility for config with single node in single country --- scripts/build_clustered_population_layouts.py | 1 - scripts/build_heat_demand.py | 1 - scripts/build_solar_thermal_profiles.py | 1 - scripts/build_temperature_profiles.py | 1 - 4 files changed, 4 deletions(-) diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 083f3de46..73972d3d6 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -28,7 +28,6 @@ gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_heat_demand.py b/scripts/build_heat_demand.py index 734942600..da7c476e9 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -34,7 +34,6 @@ gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git 
a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index d285691a4..4e7a6cd4a 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -36,7 +36,6 @@ gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index 9db37c257..d8eaadcec 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -34,7 +34,6 @@ gpd.read_file(snakemake.input.regions_onshore) .set_index("name") .buffer(0) - .squeeze() ) I = cutout.indicatormatrix(clustered_regions) From d7051e7f66eb3bdbe0f790ea4513cbf01133b09a Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 12:16:43 +0000 Subject: [PATCH 58/64] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- scripts/build_clustered_population_layouts.py | 4 +--- scripts/build_heat_demand.py | 4 +--- scripts/build_solar_thermal_profiles.py | 4 +--- scripts/build_temperature_profiles.py | 4 +--- 4 files changed, 4 insertions(+), 12 deletions(-) diff --git a/scripts/build_clustered_population_layouts.py b/scripts/build_clustered_population_layouts.py index 73972d3d6..2f2376569 100644 --- a/scripts/build_clustered_population_layouts.py +++ b/scripts/build_clustered_population_layouts.py @@ -25,9 +25,7 @@ cutout = atlite.Cutout(snakemake.input.cutout) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_heat_demand.py b/scripts/build_heat_demand.py index da7c476e9..777684043 100644 --- a/scripts/build_heat_demand.py +++ b/scripts/build_heat_demand.py @@ -31,9 +31,7 @@ cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_solar_thermal_profiles.py b/scripts/build_solar_thermal_profiles.py index 4e7a6cd4a..ee6ed881d 100644 --- a/scripts/build_solar_thermal_profiles.py +++ b/scripts/build_solar_thermal_profiles.py @@ -33,9 +33,7 @@ cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) diff --git a/scripts/build_temperature_profiles.py b/scripts/build_temperature_profiles.py index d8eaadcec..a13ec3c21 100644 --- a/scripts/build_temperature_profiles.py +++ b/scripts/build_temperature_profiles.py @@ -31,9 +31,7 @@ cutout = atlite.Cutout(snakemake.input.cutout).sel(time=time) clustered_regions = ( - gpd.read_file(snakemake.input.regions_onshore) - .set_index("name") - .buffer(0) + gpd.read_file(snakemake.input.regions_onshore).set_index("name").buffer(0) ) I = cutout.indicatormatrix(clustered_regions) From 00aa07242a313755f8de1a2a6da7111f4cc1abf6 Mon Sep 17 00:00:00 2001 From: Tom Brown Date: Fri, 8 Dec 2023 17:53:28 +0100 Subject: [PATCH 59/64] add_brownfield: disable grid 
expansion if LV already hit Numerical problems were causing infeasibilities otherwise --- scripts/add_brownfield.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index 741025801..fb1453fd7 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,6 +119,32 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 +def disable_grid_expansion_if_LV_limit_hit(n): + if not "lv_limit" in n.global_constraints.index: + return + + #calculate minimum LV + attr = "nom_min" + dc = n.links.index[n.links.carrier == "DC"] + tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + + diff = n.global_constraints.at["lv_limit","constant"]-tot + + #allow small numerical differences + limit = 1 + + if diff < limit: + logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + expandable_acs = n.lines.index[n.lines.s_nom_extendable] + n.lines.loc[expandable_acs,"s_nom_extendable"] = False + n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + + expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] + n.links.loc[expandable_dcs,"p_nom_extendable"] = False + n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + + n.global_constraints.drop("lv_limit", + inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): @@ -150,5 +176,7 @@ def add_brownfield(n, n_p, year): add_brownfield(n, n_p, year) + disable_grid_expansion_if_LV_limit_hit(n) + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output[0]) From 42f11752caa06a57f3b4bde2de24f0d5e5e95255 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:35:11 +0100 Subject: [PATCH 60/64] standardise formatting --- scripts/add_brownfield.py | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index fb1453fd7..ffdaf46be 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -119,32 +119,39 @@ def add_brownfield(n, n_p, year): n.links.loc[new_pipes, "p_nom"] = 0.0 n.links.loc[new_pipes, "p_nom_min"] = 0.0 + def disable_grid_expansion_if_LV_limit_hit(n): if not "lv_limit" in n.global_constraints.index: return - #calculate minimum LV + # calculate minimum LV attr = "nom_min" dc = n.links.index[n.links.carrier == "DC"] - tot = (n.lines["s_" + attr]*n.lines["length"]).sum() + (n.links.loc[dc,"p_" + attr]*n.links.loc[dc,"length"]).sum() + tot = (n.lines["s_" + attr] * n.lines["length"]).sum() + ( + n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"] + ).sum() - diff = n.global_constraints.at["lv_limit","constant"]-tot + diff = n.global_constraints.at["lv_limit", "constant"] - tot - #allow small numerical differences + # allow small numerical differences limit = 1 if diff < limit: - logger.info(f"LV is already reached (gap {diff}), disabling expansion and LV limit") + logger.info( + f"LV is already reached (gap {diff}), disabling expansion and LV limit" + ) expandable_acs = n.lines.index[n.lines.s_nom_extendable] - n.lines.loc[expandable_acs,"s_nom_extendable"] = False - n.lines.loc[expandable_acs,"s_nom"] = n.lines.loc[expandable_acs,"s_nom_min"] + n.lines.loc[expandable_acs, "s_nom_extendable"] = False + n.lines.loc[expandable_acs, 
"s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"] + + expandable_dcs = n.links.index[ + n.links.p_nom_extendable & (n.links.carrier == "DC") + ] + n.links.loc[expandable_dcs, "p_nom_extendable"] = False + n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"] - expandable_dcs = n.links.index[n.links.p_nom_extendable & (n.links.carrier == "DC")] - n.links.loc[expandable_dcs,"p_nom_extendable"] = False - n.links.loc[expandable_dcs,"p_nom"] = n.links.loc[expandable_dcs,"p_nom_min"] + n.global_constraints.drop("lv_limit", inplace=True) - n.global_constraints.drop("lv_limit", - inplace=True) if __name__ == "__main__": if "snakemake" not in globals(): From deba2a4ed53163ade07d2ba7a64c4f928ae10c72 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:41:42 +0100 Subject: [PATCH 61/64] tidy code --- scripts/add_brownfield.py | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/scripts/add_brownfield.py b/scripts/add_brownfield.py index ffdaf46be..9ddd3d999 100644 --- a/scripts/add_brownfield.py +++ b/scripts/add_brownfield.py @@ -124,31 +124,25 @@ def disable_grid_expansion_if_LV_limit_hit(n): if not "lv_limit" in n.global_constraints.index: return - # calculate minimum LV - attr = "nom_min" - dc = n.links.index[n.links.carrier == "DC"] - tot = (n.lines["s_" + attr] * n.lines["length"]).sum() + ( - n.links.loc[dc, "p_" + attr] * n.links.loc[dc, "length"] + total_expansion = ( + n.lines.eval("s_nom_min * length").sum() + + n.links.query("carrier == 'DC'").eval("p_nom_min * length").sum() ).sum() - diff = n.global_constraints.at["lv_limit", "constant"] - tot + lv_limit = n.global_constraints.at["lv_limit", "constant"] # allow small numerical differences - limit = 1 - - if diff < limit: + if lv_limit - total_expansion < 1: logger.info( - f"LV is already reached (gap {diff}), disabling expansion and LV limit" + f"LV is already reached (gap {diff} MWkm), disabling expansion and LV limit" ) - expandable_acs = n.lines.index[n.lines.s_nom_extendable] - n.lines.loc[expandable_acs, "s_nom_extendable"] = False - n.lines.loc[expandable_acs, "s_nom"] = n.lines.loc[expandable_acs, "s_nom_min"] + extendable_acs = n.lines.query("s_nom_extendable").index + n.lines.loc[extendable_acs, "s_nom_extendable"] = False + n.lines.loc[extendable_acs, "s_nom"] = n.lines.loc[extendable_acs, "s_nom_min"] - expandable_dcs = n.links.index[ - n.links.p_nom_extendable & (n.links.carrier == "DC") - ] - n.links.loc[expandable_dcs, "p_nom_extendable"] = False - n.links.loc[expandable_dcs, "p_nom"] = n.links.loc[expandable_dcs, "p_nom_min"] + extendable_dcs = n.links.query("carrier == 'DC' and p_nom_extendable").index + n.links.loc[extendable_dcs, "p_nom_extendable"] = False + n.links.loc[extendable_dcs, "p_nom"] = n.links.loc[extendable_dcs, "p_nom_min"] n.global_constraints.drop("lv_limit", inplace=True) From 257b16efd8efae8848171083c1d4c04ab4af9579 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:47:13 +0100 Subject: [PATCH 62/64] print IIS if solver returns status infeasible --- scripts/solve_network.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/solve_network.py b/scripts/solve_network.py index ff2a2f232..8c46e0252 100644 --- a/scripts/solve_network.py +++ b/scripts/solve_network.py @@ -839,6 +839,9 @@ def solve_network(n, config, solving, opts="", **kwargs): f"Solving status '{status}' with termination condition '{condition}'" ) if "infeasible" in condition: + labels = n.model.compute_infeasibilities() 
+ logger.info("Labels:\n" + labels) + n.model.print_infeasibilities() raise RuntimeError("Solving status 'infeasible'") return n From 2acddb6a7ccf1c6d30bcf8d452e7c2bd61a7a36c Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 13:48:34 +0100 Subject: [PATCH 63/64] add release note --- doc/release_notes.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/doc/release_notes.rst b/doc/release_notes.rst index 5ac7925e1..31e492a81 100644 --- a/doc/release_notes.rst +++ b/doc/release_notes.rst @@ -81,6 +81,9 @@ Upcoming Release * Remove HELMETH option. +* Print Irreducible Infeasible Subset (IIS) if model is infeasible. Only for + solvers with IIS support. + **Bugs and Compatibility** * A bug preventing custom powerplants specified in ``data/custom_powerplants.csv`` was fixed. (https://github.com/PyPSA/pypsa-eur/pull/732) From 1a6031f318aab522d3356c2bb4ef314b3eed76d2 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Wed, 3 Jan 2024 15:25:27 +0100 Subject: [PATCH 64/64] only copy config.default.yaml if it exists --- Snakefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Snakefile b/Snakefile index 83530df7f..14ce0e405 100644 --- a/Snakefile +++ b/Snakefile @@ -14,7 +14,7 @@ from snakemake.utils import min_version min_version("7.7") -if not exists("config/config.yaml"): +if not exists("config/config.yaml") and exists("config/config.default.yaml"): copyfile("config/config.default.yaml", "config/config.yaml")