From 84d38228ae9f3f05e74b19d75a32c83bd0f09901 Mon Sep 17 00:00:00 2001
From: martacki
Date: Mon, 18 Dec 2023 09:35:17 +0100
Subject: [PATCH 1/9] fix environment

---
 envs/environment.yaml | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/envs/environment.yaml b/envs/environment.yaml
index 5ec368acd..aaa9818c2 100644
--- a/envs/environment.yaml
+++ b/envs/environment.yaml
@@ -11,7 +11,7 @@ dependencies:
 - pip
 - atlite>=0.2.9
-- pypsa
+- pypsa>=0.26.0
 - linopy
 - dask
@@ -27,9 +27,9 @@ dependencies:
 - lxml
 - powerplantmatching>=0.5.5
 - numpy
-- pandas>=1.4
+- pandas>=2.1
 - geopandas>=0.11.0
-- xarray
+- xarray<=2023.8.0
 - rioxarray
 - netcdf4
 - networkx
@@ -46,6 +46,7 @@ dependencies:
 - tabula-py
 - pyxlsb
 - graphviz
+- ipopt

 # Keep in conda environment when calling ipython
 - ipython

From 7e543ad0cce0690fedb6f70d0c2e575f4bdd4051 Mon Sep 17 00:00:00 2001
From: martacki
Date: Mon, 18 Dec 2023 09:57:05 +0100
Subject: [PATCH 2/9] base_network: fix pandas 2.1 issue

---
 scripts/base_network.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/base_network.py b/scripts/base_network.py
index 1929c59a5..90854e344 100644
--- a/scripts/base_network.py
+++ b/scripts/base_network.py
@@ -560,7 +560,7 @@ def prefer_voltage(x, which):
         ~buses["under_construction"]
     )

-    c_nan_b = buses.country.isnull()
+    c_nan_b = buses.country == 'na'
     if c_nan_b.sum() > 0:
         c_tag = _get_country(buses.loc[c_nan_b])
         c_tag.loc[~c_tag.isin(countries)] = np.nan

From ea3fdb6c15bbc0d70308970dad6f7784c970c4b3 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 18 Dec 2023 08:58:13 +0000
Subject: [PATCH 3/9] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 scripts/base_network.py     | 2 +-
 scripts/cluster_network.py  | 2 +-
 scripts/simplify_network.py | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/scripts/base_network.py b/scripts/base_network.py
index 90854e344..eda29451f 100644
--- a/scripts/base_network.py
+++ b/scripts/base_network.py
@@ -560,7 +560,7 @@ def prefer_voltage(x, which):
         ~buses["under_construction"]
     )

-    c_nan_b = buses.country == 'na'
+    c_nan_b = buses.country == "na"
     if c_nan_b.sum() > 0:
         c_tag = _get_country(buses.loc[c_nan_b])
         c_tag.loc[~c_tag.isin(countries)] = np.nan

diff --git a/scripts/cluster_network.py b/scripts/cluster_network.py
index 0f3f351fa..28f08396e 100644
--- a/scripts/cluster_network.py
+++ b/scripts/cluster_network.py
@@ -470,7 +470,7 @@ def plot_busmap_for_n_clusters(n, n_clusters, fn=None):
     n = pypsa.Network(snakemake.input.network)

     # remove integer outputs for compatibility with PyPSA v0.26.0
-    n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore')
+    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")

     exclude_carriers = params.cluster_network["exclude_carriers"]
     aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)

diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py
index d12062c27..f88d10d48 100644
--- a/scripts/simplify_network.py
+++ b/scripts/simplify_network.py
@@ -537,7 +537,7 @@ def cluster(
     Nyears = n.snapshot_weightings.objective.sum() / 8760

     # remove integer outputs for compatibility with PyPSA v0.26.0
-    n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore')
+    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")

     n, trafo_map = simplify_network_to_380(n)
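A note on the change above (patch 2, quote style normalised by patch 3): replacing buses.country.isnull() with a comparison against the string "na" suggests that, with the updated dependencies, buses without a country tag carry the literal string "na" rather than a true missing value, so .isnull() no longer flags them. A minimal sketch of that difference on toy data (the frame and its values are invented for illustration):

    import pandas as pd

    # toy buses table; the "na" entry mimics a bus whose country tag is the
    # literal string "na" instead of a real NaN
    buses = pd.DataFrame({"country": ["DE", "FR", "na"]})

    print(buses.country.isnull())  # all False: the string "na" is not a missing value
    print(buses.country == "na")   # True only for the untagged bus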
From 68aef7b628b9d1ac1dd0cba0d64448587592f210 Mon Sep 17 00:00:00 2001
From: Fabian Hofmann
Date: Mon, 18 Dec 2023 11:23:32 +0100
Subject: [PATCH 4/9] constrain xarray more recent version

---
 envs/environment.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/envs/environment.yaml b/envs/environment.yaml
index aaa9818c2..425233a89 100644
--- a/envs/environment.yaml
+++ b/envs/environment.yaml
@@ -29,7 +29,7 @@ dependencies:
 - numpy
 - pandas>=2.1
 - geopandas>=0.11.0
-- xarray<=2023.8.0
+- xarray>=2023.11.0
 - rioxarray
 - netcdf4
 - networkx

From ea42b3797ded1ab4f836ccb95efde49aa19d0527 Mon Sep 17 00:00:00 2001
From: martacki
Date: Mon, 18 Dec 2023 11:43:04 +0100
Subject: [PATCH 5/9] simplify_network&pandas21: nans not supported

---
 scripts/simplify_network.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py
index f88d10d48..48a58ed48 100644
--- a/scripts/simplify_network.py
+++ b/scripts/simplify_network.py
@@ -243,7 +243,7 @@ def _aggregate_and_move_components(
     def replace_components(n, c, df, pnl):
         n.mremove(c, n.df(c).index)

-        import_components_from_dataframe(n, df, c)
+        import_components_from_dataframe(n, df.fillna(0), c)
         for attr, df in pnl.items():
             if not df.empty:
                 import_series_from_dataframe(n, df, c, attr)

From 22acdd03a04a98dda0381bce787143045642fd5d Mon Sep 17 00:00:00 2001
From: martacki
Date: Mon, 18 Dec 2023 11:49:53 +0100
Subject: [PATCH 6/9] omit future deprecation

---
 scripts/build_biomass_potentials.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/build_biomass_potentials.py b/scripts/build_biomass_potentials.py
index 5c1eb31dc..27b2e0a39 100644
--- a/scripts/build_biomass_potentials.py
+++ b/scripts/build_biomass_potentials.py
@@ -263,7 +263,7 @@ def convert_nuts2_to_regions(bio_nuts2, regions):
     df.to_csv(snakemake.output.biomass_potentials_all)

     grouper = {v: k for k, vv in params["classes"].items() for v in vv}
-    df = df.groupby(grouper, axis=1).sum()
+    df = df.T.groupby(grouper).sum()

     df *= 1e6  # TWh/a to MWh/a

     df.index.name = "MWh/a"
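For context on patch 6: pandas 2.1 deprecates the axis=1 argument of DataFrame.groupby, so the column-wise aggregation in build_biomass_potentials.py is rewritten against the transposed frame. A minimal sketch of the replacement on toy data (column names and class mapping are invented for illustration); note that the grouped result comes out transposed relative to the old call, which a later patch in this series addresses by appending .T:

    import pandas as pd

    # rows are countries, columns are biomass sub-categories (illustrative only)
    df = pd.DataFrame(
        {"straw": [1.0, 2.0], "manure": [3.0, 4.0], "sawdust": [5.0, 6.0]},
        index=["DE", "FR"],
    )
    grouper = {"straw": "agriculture", "manure": "agriculture", "sawdust": "forestry"}

    # old form, deprecated in pandas 2.1: df.groupby(grouper, axis=1).sum()
    grouped = df.T.groupby(grouper).sum()
    print(grouped)  # classes end up on the index, countries in the columns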
From 4f67160993356ba1117949e567cbfec80ef74735 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Mon, 18 Dec 2023 11:57:21 +0100
Subject: [PATCH 7/9] fix snake format

---
 rules/retrieve.smk | 34 ++++++++++++++++++++--------------
 1 file changed, 20 insertions(+), 14 deletions(-)

diff --git a/rules/retrieve.smk b/rules/retrieve.smk
index 75cf80624..ac89e360b 100644
--- a/rules/retrieve.smk
+++ b/rules/retrieve.smk
@@ -227,6 +227,7 @@ if config["enable"]["retrieve"]:
         run:
             move(input[0], output[0])

+
 if config["enable"]["retrieve"]:

     # Downloading Copernicus Global Land Cover for land cover and land use:
@@ -238,26 +239,29 @@ if config["enable"]["retrieve"]:
                 static=True,
             ),
         output:
-            RESOURCES + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
-        run: move(input[0], output[0])
+            RESOURCES
+            + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
+        run:
+            move(input[0], output[0])


 if config["enable"]["retrieve"]:
     current_month = datetime.now().strftime("%b")
     current_year = datetime.now().strftime("%Y")
     bYYYY = f"{current_month}{current_year}"

     def check_file_exists(url):
         response = requests.head(url)
         return response.status_code == 200

     url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"

     if not check_file_exists(url):
         prev_month = (datetime.now() - timedelta(30)).strftime("%b")
         bYYYY = f"{prev_month}{current_year}"
         assert check_file_exists(
             f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
         ), "The file does not exist."

     # Downloading protected area database from WDPA
     # extract the main zip and then merge the contained 3 zipped shapefiles
     rule download_wdpa:
         input:
             HTTP.remote(
                 f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip",
                 static=True,
                 keep_local=True,
             ),
         params:
             zip=RESOURCES + f"WDPA_{bYYYY}_shp.zip",
             folder=directory(RESOURCES + f"WDPA_{bYYYY}"),
         output:
             gpkg=RESOURCES + f"WDPA_{bYYYY}.gpkg",
         run:
             shell("unzip -o {params.zip} -d {params.folder}")
             for i in range(3):
                 # vsizip is special driver for directly working with zipped shapefiles in ogr2ogr
                 layer_path = (
                     f"/vsizip/{params.folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
                 )
                 print(f"Adding layer {i+1} of 3 to combined output file.")
                 shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")

-    # Downloading Marine protected area database from WDPA
-    # extract the main zip and then merge the contained 3 zipped shapefiles
-    # Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
     rule download_wdpa_marine:
+        # Downloading Marine protected area database from WDPA
+        # extract the main zip and then merge the contained 3 zipped shapefiles
+        # Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
         input:
             HTTP.remote(
                 f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_WDOECM_{bYYYY}_Public_marine_shp.zip",

             shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")

+
 if config["enable"]["retrieve"]:

     rule retrieve_monthly_co2_prices:
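Patch 7 is mostly automated re-formatting of rules/retrieve.smk, but it also shows the retrieval logic for the WDPA archive: the rule derives the archive tag from the current month and falls back to the previous month if the file has not been published yet. A standalone sketch of that check for experimenting outside Snakemake (the function names are illustrative; the URL pattern is the one used in the rule):

    from datetime import datetime, timedelta

    import requests


    def check_file_exists(url: str) -> bool:
        # a HEAD request is enough to test whether the archive is published
        return requests.head(url).status_code == 200


    def resolve_wdpa_tag() -> str:
        """Return a tag like 'Dec2023', falling back to the previous month."""
        now = datetime.now()
        tag = f"{now.strftime('%b')}{now.strftime('%Y')}"
        url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{tag}_Public.zip"
        if not check_file_exists(url):
            # mirrors the rule: the month rolls back by ~30 days, the year is kept
            prev_month = (now - timedelta(30)).strftime("%b")
            tag = f"{prev_month}{now.strftime('%Y')}"
        return tag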
From cc08dff280b02553331dbb9b34a5a41c649e0236 Mon Sep 17 00:00:00 2001
From: Fabian
Date: Mon, 18 Dec 2023 12:44:21 +0100
Subject: [PATCH 8/9] simplify_network: revert fillna

---
 scripts/simplify_network.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/simplify_network.py b/scripts/simplify_network.py
index 48a58ed48..f88d10d48 100644
--- a/scripts/simplify_network.py
+++ b/scripts/simplify_network.py
@@ -243,7 +243,7 @@ def _aggregate_and_move_components(
     def replace_components(n, c, df, pnl):
         n.mremove(c, n.df(c).index)

-        import_components_from_dataframe(n, df.fillna(0), c)
+        import_components_from_dataframe(n, df, c)
         for attr, df in pnl.items():
             if not df.empty:
                 import_series_from_dataframe(n, df, c, attr)

From d39b579346607a7ffd8ffe71bcc9594a319e6d0d Mon Sep 17 00:00:00 2001
From: Fabian
Date: Mon, 18 Dec 2023 12:56:08 +0100
Subject: [PATCH 9/9] build_biomass_potentials: fix groupby and transpose

---
 scripts/build_biomass_potentials.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/build_biomass_potentials.py b/scripts/build_biomass_potentials.py
index 27b2e0a39..d7c467cf6 100644
--- a/scripts/build_biomass_potentials.py
+++ b/scripts/build_biomass_potentials.py
@@ -263,7 +263,7 @@ def convert_nuts2_to_regions(bio_nuts2, regions):
     df.to_csv(snakemake.output.biomass_potentials_all)

     grouper = {v: k for k, vv in params["classes"].items() for v in vv}
-    df = df.T.groupby(grouper).sum()
+    df = df.T.groupby(grouper).sum().T

     df *= 1e6  # TWh/a to MWh/a

     df.index.name = "MWh/a"