From 6c13974643ee9687d3053e29932af73a7da01dc5 Mon Sep 17 00:00:00 2001
From: lisazeyen
Date: Mon, 1 Aug 2022 18:02:55 +0200
Subject: [PATCH 1/7] add option to cluster heat buses

---
 config.default.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/config.default.yaml b/config.default.yaml
index b2fa5f6b..ad6764c7 100644
--- a/config.default.yaml
+++ b/config.default.yaml
@@ -154,6 +154,7 @@ sector:
   #  2040: 0.6
   #  2050: 1.0
   district_heating_loss: 0.15
+  cluster_heat_buses: False # cluster residential and service heat buses to one to save memory
   bev_dsm_restriction_value: 0.75 #Set to 0 for no restriction on BEV DSM
   bev_dsm_restriction_time: 7 #Time at which SOC of BEV has to be dsm_restriction_value
   transport_heating_deadband_upper: 20.

From 973074de217687440af9a86aca8a813dd02ceab9 Mon Sep 17 00:00:00 2001
From: lisazeyen
Date: Mon, 1 Aug 2022 18:03:11 +0200
Subject: [PATCH 2/7] add function to cluster heat buses

---
 scripts/prepare_sector_network.py | 99 +++++++++++++++++++++++++++++++
 1 file changed, 99 insertions(+)

diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index 7abdadff..1eabc37d 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -19,6 +19,7 @@ from networkx.algorithms.connectivity.edge_augmentation import k_edge_augmentation
 from networkx.algorithms import complement
 from pypsa.geo import haversine_pts
+from pypsa.io import import_components_from_dataframe
 
 import logging
 logger = logging.getLogger(__name__)
 
@@ -26,6 +27,9 @@ from types import SimpleNamespace
 
 spatial = SimpleNamespace()
 
+from distutils.version import LooseVersion
+pd_version = LooseVersion(pd.__version__)
+agg_group_kwargs = dict(numeric_only=False) if pd_version >= "1.3" else {}
 
 def define_spatial(nodes, options):
     """
@@ -2323,6 +2327,99 @@ def limit_individual_line_extension(n, maxext):
     hvdc = n.links.index[n.links.carrier == 'DC']
     n.links.loc[hvdc, 'p_nom_max'] = n.links.loc[hvdc, 'p_nom'] + maxext
 
+
+aggregate_dict = {
+    "p_nom": "sum",
+    "s_nom": "sum",
+    "v_nom": "max",
+    "v_mag_pu_max": "min",
+    "v_mag_pu_min": "max",
+    "p_nom_max": "sum",
+    "s_nom_max": "sum",
+    "p_nom_min": "sum",
+    "s_nom_min": "sum",
+    "v_ang_min": "max",
+    "v_ang_max": "min",
+    "terrain_factor": "mean",
+    "num_parallel": "sum",
+    "p_set": "sum",
+    "e_initial": "sum",
+    "e_nom": "sum",
+    "e_nom_max": "sum",
+    "e_nom_min": "sum",
+    "state_of_charge_initial": "sum",
+    "state_of_charge_set": "sum",
+    "inflow": "sum",
+    "p_max_pu": "first",
+    "x": "mean",
+    "y": "mean",
+}
+
+
+def cluster_heat_buses(n):
+    """Cluster residential and service heat buses to one representative bus.
+    This can be done to save memory and speed up the optimisation.
+    """
+
+    def define_clustering(attributes, aggregate_dict):
+        """Define how attributes should be clustered.
+
+        Input:
+            attributes    : pd.Index()
+            aggregate_dict: dictionary (key: name of attribute, value:
+                            clustering method)
+
+        Returns:
+            agg: clustering dictionary
+        """
+        keys = attributes.intersection(aggregate_dict.keys())
+        agg = dict(
+            zip(
+                attributes.difference(keys),
+                ["first"] * len(attributes.difference(keys)),
+            )
+        )
+        for key in keys:
+            agg[key] = aggregate_dict[key]
+        return agg
+
+    logger.info("Cluster residential and service heat buses.")
+    components = ["Bus", "Carrier", "Generator", "Link", "Load", "Store"]
+
+    for c in n.iterate_components(components):
+        df = c.df
+        cols = df.columns[df.columns.str.contains("bus") | (df.columns == "carrier")]
+
+        # rename columns and index
+        df[cols] = df[cols].apply(
+            lambda x: x.str.replace("residential ", "").str.replace("services ", ""),
+            axis=1,
+        )
+        df = df.rename(
+            index=lambda x: x.replace("residential ", "").replace("services ", "")
+        )
+
+        # cluster heat nodes
+        # static dataframe
+        agg = define_clustering(df.columns, aggregate_dict)
+        df = df.groupby(level=0).agg(agg, **agg_group_kwargs)
+
+        # time-varying data
+        pnl = c.pnl
+        agg = define_clustering(pd.Index(pnl.keys()), aggregate_dict)
+        for k in pnl.keys():
+            pnl[k].rename(
+                columns=lambda x: x.replace("residential ", "").replace("services ", ""),
+                inplace=True,
+            )
+            pnl[k] = pnl[k].groupby(level=0, axis=1).agg(agg[k], **agg_group_kwargs)
+
+        # remove unclustered assets of service/residential
+        to_drop = c.df.index.difference(df.index)
+        n.mremove(c.name, to_drop)
+
+        # add clustered assets
+        to_add = df.index.difference(c.df.index)
+        import_components_from_dataframe(n, df.loc[to_add], c.name)
+
+
 #%%
 if __name__ == "__main__":
     if 'snakemake' not in globals():
@@ -2467,4 +2564,6 @@ def limit_individual_line_extension(n, maxext):
     if options['electricity_grid_connection']:
         add_electricity_grid_connection(n, costs)
 
+    if options["cluster_heat_buses"]:
+        cluster_heat_buses(n)
     n.export_to_netcdf(snakemake.output[0])

From cbab86c4bcf2227aaa34f9dfd198e62a6e0d2c27 Mon Sep 17 00:00:00 2001
From: lisazeyen
Date: Mon, 1 Aug 2022 18:15:35 +0200
Subject: [PATCH 3/7] add heat buses clustering to myopic

---
 scripts/add_existing_baseyear.py  | 5 ++++-
 scripts/prepare_sector_network.py | 6 +++++-
 2 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py
index 11b8d49b..1cf532a3 100644
--- a/scripts/add_existing_baseyear.py
+++ b/scripts/add_existing_baseyear.py
@@ -12,7 +12,7 @@ import pypsa
 import yaml
 
-from prepare_sector_network import prepare_costs, define_spatial
+from prepare_sector_network import prepare_costs, define_spatial, cluster_heat_buses
 from helper import override_component_attrs, update_config_with_sector_opts
 
 from types import SimpleNamespace
 
@@ -495,4 +495,7 @@ def add_heating_capacities_installed_before_baseyear(n, baseyear, grouping_years
         default_lifetime = snakemake.config['costs']['lifetime']
         add_heating_capacities_installed_before_baseyear(n, baseyear, grouping_years,
                                                          ashp_cop, gshp_cop, time_dep_hp_cop, costs, default_lifetime)
+    if options["cluster_heat_buses"]:
+        cluster_heat_buses(n)
+
     n.export_to_netcdf(snakemake.output[0])
diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py
index 1eabc37d..c3b518e2 100644
--- a/scripts/prepare_sector_network.py
+++ b/scripts/prepare_sector_network.py
@@ -2564,6 +2564,10 @@ def define_clustering(attributes, aggregate_dict):
     if options['electricity_grid_connection']:
         add_electricity_grid_connection(n, costs)
 
-    if options["cluster_heat_buses"]:
+    first_year_myopic = ((snakemake.config["foresight"] == 'myopic') and
+                         (snakemake.config["scenario"]["planning_horizons"][0] == investment_year))
+
+    if options["cluster_heat_buses"] and not first_year_myopic:
         cluster_heat_buses(n)
+
     n.export_to_netcdf(snakemake.output[0])
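
In essence, the cluster_heat_buses() function added in [PATCH 2/7] strips the "residential " and "services " prefixes from component names, bus references and carriers, and then merges the resulting duplicate entries with a per-attribute aggregation rule. The following minimal pandas sketch illustrates that idea on a toy static load table; the bus names and p_set values are made up for illustration, and the real function additionally handles time-varying data and all the listed component types:

import pandas as pd

# toy static component table with one residential and one services heat load
loads = pd.DataFrame(
    {
        "bus": ["DE0 0 residential rural heat", "DE0 0 services rural heat"],
        "carrier": ["residential rural heat", "services rural heat"],
        "p_set": [10.0, 5.0],
    },
    index=["DE0 0 residential rural heat load", "DE0 0 services rural heat load"],
)

# strip the sub-sector prefixes from the bus/carrier columns and from the index,
# analogous to what cluster_heat_buses() does for every component
strip = lambda x: x.replace("residential ", "").replace("services ", "")
loads[["bus", "carrier"]] = loads[["bus", "carrier"]].apply(lambda col: col.map(strip))
loads = loads.rename(index=strip)

# merge the now-identical index entries: summable attributes are summed,
# everything else keeps the first value (cf. aggregate_dict in the patch)
agg = {"bus": "first", "carrier": "first", "p_set": "sum"}
clustered = loads.groupby(level=0).agg(agg)
print(clustered)  # a single row "DE0 0 rural heat load" with p_set = 15.0

After the merge, residential and services demand sit on one shared heat bus per node, which is what reduces the number of buses and hence memory in the full model.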
options["cluster_heat_buses"]: + first_year_myopic = ((snakemake.config["foresight"] == 'myopic') and + (snakemake.config["scenario"]["planning_horizons"][0]==investment_year)) + + if options["cluster_heat_buses"] and not first_year_myopic: cluster_heat_buses(n) + n.export_to_netcdf(snakemake.output[0]) From 34a3d9aaad05706d73bb1a3857afd7a23087b1e2 Mon Sep 17 00:00:00 2001 From: lisazeyen Date: Tue, 2 Aug 2022 09:27:37 +0200 Subject: [PATCH 4/7] remove depreciated distutils.version --- scripts/prepare_sector_network.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index c3b518e2..c9494d74 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -27,9 +27,9 @@ from types import SimpleNamespace spatial = SimpleNamespace() -from distutils.version import LooseVersion -pd_version = LooseVersion(pd.__version__) -agg_group_kwargs = dict(numeric_only=False) if pd_version >= "1.3" else {} +from packaging.version import Version, parse +pd_version = parse(pd.__version__) +agg_group_kwargs = dict(numeric_only=False) if pd_version >= Version("1.3") else {} def define_spatial(nodes, options): """ From 984ab9350c4c05ecad3f9a3be7c628a4d478be59 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 30 Jan 2023 11:54:19 +0100 Subject: [PATCH 5/7] Update scripts/prepare_sector_network.py --- scripts/prepare_sector_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/prepare_sector_network.py b/scripts/prepare_sector_network.py index e07c672f..89d774ca 100644 --- a/scripts/prepare_sector_network.py +++ b/scripts/prepare_sector_network.py @@ -2920,7 +2920,7 @@ def set_temporal_aggregation(n, opts, solver_name): first_year_myopic = ((snakemake.config["foresight"] == 'myopic') and (snakemake.config["scenario"]["planning_horizons"][0]==investment_year)) - if options["cluster_heat_buses"] and not first_year_myopic: + if options.get("cluster_heat_buses", False) and not first_year_myopic: cluster_heat_buses(n) From 4a2da0a5f4d51c3bb725348a4c6bf14def11bd35 Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 30 Jan 2023 11:55:40 +0100 Subject: [PATCH 6/7] Update scripts/add_existing_baseyear.py --- scripts/add_existing_baseyear.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/scripts/add_existing_baseyear.py b/scripts/add_existing_baseyear.py index 3618f441..8e274d62 100644 --- a/scripts/add_existing_baseyear.py +++ b/scripts/add_existing_baseyear.py @@ -563,6 +563,9 @@ def add_heating_capacities_installed_before_baseyear(n, baseyear, grouping_years add_heating_capacities_installed_before_baseyear(n, baseyear, grouping_years_heat, ashp_cop, gshp_cop, time_dep_hp_cop, costs, default_lifetime) + if options.get("cluster_heat_buses", False): + cluster_heat_buses(n) + n.meta = dict(snakemake.config, **dict(wildcards=dict(snakemake.wildcards))) n.export_to_netcdf(snakemake.output[0]) From 7982a37b6de6d85cb531ad14e185c988c85b2e9f Mon Sep 17 00:00:00 2001 From: Fabian Neumann Date: Mon, 30 Jan 2023 11:56:01 +0100 Subject: [PATCH 7/7] Update config.default.yaml --- config.default.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config.default.yaml b/config.default.yaml index 5ab444e3..e165e895 100644 --- a/config.default.yaml +++ b/config.default.yaml @@ -160,7 +160,7 @@ sector: 2040: 0.6 2050: 1.0 district_heating_loss: 0.15 - cluster_heat_buses: False # cluster residential and service heat buses to one to save memory + 
From 7982a37b6de6d85cb531ad14e185c988c85b2e9f Mon Sep 17 00:00:00 2001
From: Fabian Neumann
Date: Mon, 30 Jan 2023 11:56:01 +0100
Subject: [PATCH 7/7] Update config.default.yaml

---
 config.default.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/config.default.yaml b/config.default.yaml
index 5ab444e3..e165e895 100644
--- a/config.default.yaml
+++ b/config.default.yaml
@@ -160,7 +160,7 @@ sector:
       2040: 0.6
       2050: 1.0
   district_heating_loss: 0.15
-  cluster_heat_buses: False # cluster residential and service heat buses to one to save memory
+  cluster_heat_buses: false # cluster residential and service heat buses to one to save memory
   bev_dsm_restriction_value: 0.75 #Set to 0 for no restriction on BEV DSM
   bev_dsm_restriction_time: 7 #Time at which SOC of BEV has to be dsm_restriction_value
   transport_heating_deadband_upper: 20.
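
Taken together, the option ships disabled in config.default.yaml (cluster_heat_buses: false under sector:), and when it is enabled, prepare_sector_network.py skips the clustering in the first planning horizon of a myopic run, presumably so that add_existing_baseyear.py can first attach the existing residential/services heating capacities and cluster afterwards. A small self-contained sketch of that guard; the config values and investment_year below are stand-ins for the real snakemake config and planning-horizon wildcard:

# stand-ins for snakemake.config and the planning-horizon wildcard
config = {
    "foresight": "myopic",
    "scenario": {"planning_horizons": [2030, 2040, 2050]},
    "sector": {"cluster_heat_buses": True},
}
investment_year = 2030

options = config["sector"]
first_year_myopic = (
    config["foresight"] == "myopic"
    and config["scenario"]["planning_horizons"][0] == investment_year
)

# same condition as in prepare_sector_network.py after [PATCH 5/7]
if options.get("cluster_heat_buses", False) and not first_year_myopic:
    print("clustering heat buses")
else:
    print("keeping residential/services heat buses separate")  # first myopic year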