Merge pull request #805 from PyPSA/env_fixes
Env fixes
FabianHofmann authored Dec 18, 2023
2 parents 798a6b8 + d39b579 commit 179ab43
Showing 6 changed files with 28 additions and 21 deletions.
7 changes: 4 additions & 3 deletions envs/environment.yaml
@@ -11,7 +11,7 @@ dependencies:
   - pip
 
   - atlite>=0.2.9
-  - pypsa
+  - pypsa>=0.26.0
   - linopy
   - dask

@@ -27,9 +27,9 @@ dependencies:
   - lxml
   - powerplantmatching>=0.5.5
   - numpy
-  - pandas>=1.4
+  - pandas>=2.1
   - geopandas>=0.11.0
-  - xarray
+  - xarray>=2023.11.0
   - rioxarray
   - netcdf4
   - networkx
@@ -46,6 +46,7 @@ dependencies:
   - tabula-py
   - pyxlsb
   - graphviz
+  - ipopt
 
   # Keep in conda environment when calling ipython
   - ipython
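The environment now pins minimum versions for pypsa, pandas and xarray and adds ipopt. A minimal sketch (not part of this commit) of how one could verify that an activated environment satisfies the new lower bounds, assuming the packaging library is available alongside pip:

    from importlib.metadata import version

    from packaging.version import Version

    # Lower bounds introduced in envs/environment.yaml by this commit.
    pins = {"pypsa": "0.26.0", "pandas": "2.1", "xarray": "2023.11.0"}

    for pkg, minimum in pins.items():
        installed = Version(version(pkg))
        assert installed >= Version(minimum), f"{pkg} {installed} is older than {minimum}"
    print("All pinned packages satisfy the new minimum versions.")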
34 changes: 20 additions & 14 deletions rules/retrieve.smk
@@ -227,6 +227,7 @@ if config["enable"]["retrieve"]:
         run:
             move(input[0], output[0])
 
+
 if config["enable"]["retrieve"]:
 
     # Downloading Copernicus Global Land Cover for land cover and land use:
@@ -238,26 +239,29 @@ if config["enable"]["retrieve"]:
                 static=True,
             ),
         output:
-            RESOURCES + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
-        run: move(input[0], output[0])
+            RESOURCES
+            + "Copernicus_LC100_global_v3.0.1_2019-nrt_Discrete-Classification-map_EPSG-4326.tif",
+        run:
+            move(input[0], output[0])
 
 
 if config["enable"]["retrieve"]:
 
-    current_month = datetime.now().strftime('%b')
-    current_year = datetime.now().strftime('%Y')
+    current_month = datetime.now().strftime("%b")
+    current_year = datetime.now().strftime("%Y")
     bYYYY = f"{current_month}{current_year}"
 
     def check_file_exists(url):
         response = requests.head(url)
         return response.status_code == 200
 
-    url = f'https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip'
+    url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
 
     if not check_file_exists(url):
-        prev_month = (datetime.now()-timedelta(30)).strftime('%b')
+        prev_month = (datetime.now() - timedelta(30)).strftime("%b")
         bYYYY = f"{prev_month}{current_year}"
-        assert check_file_exists(f'https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip'), "The file does not exist."
+        assert check_file_exists(
+            f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"
+        ), "The file does not exist."
 
     # Downloading protected area database from WDPA
     # extract the main zip and then merge the contained 3 zipped shapefiles
@@ -268,7 +272,7 @@ if config["enable"]["retrieve"]:
                 f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public_shp.zip",
                 static=True,
                 keep_local=True,
-            )
+            ),
         params:
             zip=RESOURCES + f"WDPA_{bYYYY}_shp.zip",
             folder=directory(RESOURCES + f"WDPA_{bYYYY}"),
@@ -279,15 +283,16 @@ if config["enable"]["retrieve"]:
             shell("unzip -o {params.zip} -d {params.folder}")
             for i in range(3):
                 # vsizip is special driver for directly working with zipped shapefiles in ogr2ogr
-                layer_path = f"/vsizip/{params.folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
+                layer_path = (
+                    f"/vsizip/{params.folder}/WDPA_{bYYYY}_Public_shp_{i}.zip"
+                )
                 print(f"Adding layer {i+1} of 3 to combined output file.")
                 shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")
 
 
-    # Downloading Marine protected area database from WDPA
-    # extract the main zip and then merge the contained 3 zipped shapefiles
-    # Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
     rule download_wdpa_marine:
+        # Downloading Marine protected area database from WDPA
+        # extract the main zip and then merge the contained 3 zipped shapefiles
+        # Website: https://www.protectedplanet.net/en/thematic-areas/marine-protected-areas
         input:
             HTTP.remote(
                 f"d1gam3xoknrgr2.cloudfront.net/current/WDPA_WDOECM_{bYYYY}_Public_marine_shp.zip",
@@ -309,6 +314,7 @@ if config["enable"]["retrieve"]:
                 shell("ogr2ogr -f gpkg -update -append {output.gpkg} {layer_path}")
 
 
+
 if config["enable"]["retrieve"]:
 
     rule retrieve_monthly_co2_prices:
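The reformatted WDPA rules build a month-stamped download URL (e.g. "Dec2023") and fall back to the previous month's archive when the current one is not yet published. A condensed, standalone sketch of that probing logic, mirroring the Snakemake code above (hypothetical script, not part of the repository):

    from datetime import datetime, timedelta

    import requests


    def check_file_exists(url):
        # A HEAD request probes the CDN without downloading the large archive.
        return requests.head(url).status_code == 200


    bYYYY = datetime.now().strftime("%b%Y")  # e.g. "Dec2023"
    url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"

    if not check_file_exists(url):
        # Early in a month the new snapshot may be missing; retry with last month's stamp.
        prev_month = (datetime.now() - timedelta(30)).strftime("%b")
        bYYYY = f"{prev_month}{datetime.now().strftime('%Y')}"
        url = f"https://d1gam3xoknrgr2.cloudfront.net/current/WDPA_{bYYYY}_Public.zip"

    print(url)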
2 changes: 1 addition & 1 deletion scripts/base_network.py
@@ -560,7 +560,7 @@ def prefer_voltage(x, which):
         ~buses["under_construction"]
     )
 
-    c_nan_b = buses.country.isnull()
+    c_nan_b = buses.country == "na"
     if c_nan_b.sum() > 0:
         c_tag = _get_country(buses.loc[c_nan_b])
         c_tag.loc[~c_tag.isin(countries)] = np.nan
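The condition changes from detecting NaN to detecting the literal string "na", presumably because unassigned bus countries now arrive as that placeholder rather than as missing values. A toy pandas illustration (made-up data) of why .isnull() no longer flags such rows:

    import pandas as pd

    buses = pd.DataFrame({"country": ["DE", "na", "FR"]})  # "na" marks an unknown country

    print(buses.country.isnull().sum())   # 0 -> the placeholder string is not NaN
    print((buses.country == "na").sum())  # 1 -> the comparison finds the untagged bus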
2 changes: 1 addition & 1 deletion scripts/build_biomass_potentials.py
@@ -263,7 +263,7 @@ def convert_nuts2_to_regions(bio_nuts2, regions):
     df.to_csv(snakemake.output.biomass_potentials_all)
 
     grouper = {v: k for k, vv in params["classes"].items() for v in vv}
-    df = df.groupby(grouper, axis=1).sum()
+    df = df.T.groupby(grouper).sum().T
 
     df *= 1e6  # TWh/a to MWh/a
     df.index.name = "MWh/a"
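This rewrites a column-wise groupby: pandas 2.x deprecates DataFrame.groupby(..., axis=1), so the same aggregation is expressed by transposing, grouping on the index, and transposing back, consistent with the new pandas>=2.1 pin. A small equivalence sketch with made-up biomass classes:

    import pandas as pd

    df = pd.DataFrame([[1.0, 2.0, 3.0]], columns=["fuelwood", "straw", "manure"])
    grouper = {"fuelwood": "solid biomass", "straw": "solid biomass", "manure": "biogas"}

    # Old (deprecated) form: df.groupby(grouper, axis=1).sum()
    new = df.T.groupby(grouper).sum().T
    print(new)  # columns "biogas" and "solid biomass" with values 3.0 and 3.0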
2 changes: 1 addition & 1 deletion scripts/cluster_network.py
@@ -470,7 +470,7 @@ def plot_busmap_for_n_clusters(n, n_clusters, fn=None):
     n = pypsa.Network(snakemake.input.network)
 
     # remove integer outputs for compatibility with PyPSA v0.26.0
-    n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore')
+    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")
 
     exclude_carriers = params.cluster_network["exclude_carriers"]
     aggregate_carriers = set(n.generators.carrier) - set(exclude_carriers)
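Only the quote style changes here, but the errors="ignore" argument is what makes the drop of the legacy "n_mod" column (removed, per the comment above, for compatibility with PyPSA v0.26.0) safe: the call is a no-op when the column is absent. A toy illustration, not an actual PyPSA network table:

    import pandas as pd

    generators = pd.DataFrame({"p_nom": [10.0, 20.0]})  # no "n_mod" column present

    generators.drop("n_mod", axis=1, inplace=True, errors="ignore")  # silently does nothing
    print(generators.columns.tolist())  # ['p_nom']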
2 changes: 1 addition & 1 deletion scripts/simplify_network.py
@@ -537,7 +537,7 @@ def cluster(
     Nyears = n.snapshot_weightings.objective.sum() / 8760
 
     # remove integer outputs for compatibility with PyPSA v0.26.0
-    n.generators.drop("n_mod", axis=1, inplace=True, errors='ignore')
+    n.generators.drop("n_mod", axis=1, inplace=True, errors="ignore")
 
     n, trafo_map = simplify_network_to_380(n)
 

