Inclusion of Enhanced Geothermal Systems (EGS) #490

Open · wants to merge 7 commits into base: develop
2 changes: 2 additions & 0 deletions workflow/repo_data/config/config.common.yaml
@@ -1,6 +1,8 @@
foresight: 'perfect'
# docs :
renewable:
  EGS:
    dispatch: baseload # baseload or flexible
  onwind:
    cutout: era5
    resource:
4 changes: 3 additions & 1 deletion workflow/repo_data/config/config.plotting.yaml
@@ -89,7 +89,8 @@ plotting:
    "12hr_PHS_discharger": "#047d6c"
    "8hr_PHS_charger": "#069686"
    "10hr_PHS_charger": "#058a79"
-   "12hr_PHS_charger": "#047d6c"
+   "12hr_PHS_charger": "#047d6c"
    "EGS": "#d18372"

# sector studies only

@@ -258,6 +259,7 @@ plotting:
    lines: "Transmission Lines"
    ror: "Run of River"
    Load: "Load Shed"
    EGS: "Enhanced Geothermal"

# sector studies only

21 changes: 16 additions & 5 deletions workflow/rules/build_electricity.smk
@@ -26,7 +26,7 @@ rule build_shapes:
        "logs/build_shapes/{interconnect}.log",
    threads: 1
    resources:
-        mem_mb=2000,
+        mem_mb=5000,
    script:
        "../scripts/build_shapes.py"

@@ -57,7 +57,7 @@ rule build_base_network:
        "logs/create_network/{interconnect}.log",
    threads: 1
    resources:
-        mem_mb=1000,
+        mem_mb=5000,
    script:
        "../scripts/build_base_network.py"

@@ -107,7 +107,7 @@ rule build_cost_data:
        LOGS + "costs_{year}.log",
    threads: 1
    resources:
-        mem_mb=1000,
+        mem_mb=5000,
    script:
        "../scripts/build_cost_data.py"

@@ -134,7 +134,7 @@ if config["enable"].get("build_cutout", False):
            "benchmarks/" + CDIR + "build_cutout_{interconnect}_{cutout}"
        threads: ATLITE_NPROCESSES
        resources:
-            mem_mb=ATLITE_NPROCESSES * 1000,
+            mem_mb=ATLITE_NPROCESSES * 5000,
        script:
            "../scripts/build_cutout.py"

@@ -208,10 +208,11 @@ rule build_renewable_profiles:
    benchmark:
        BENCHMARKS + "{interconnect}/build_renewable_profiles_{technology}"
    threads: ATLITE_NPROCESSES
    retries: 3
    resources:
        mem_mb=ATLITE_NPROCESSES * 5000,
    wildcard_constraints:
-        technology="(?!hydro).*",  # Any technology other than hydro
+        technology="(?!hydro|EGS).*",  # Any technology other than hydro or EGS
    script:
        "../scripts/build_renewable_profiles.py"

@@ -611,6 +612,16 @@ rule add_electricity:
        hydro_breakthrough=DATA + "breakthrough_network/base_grid/hydro.csv",
        bus2sub=RESOURCES + "{interconnect}/bus2sub.csv",
        pudl_fuel_costs=RESOURCES + "{interconnect}/pudl_fuel_costs.csv",
        specs_egs=(
            DATA + "EGS/{interconnect}/specs_EGS.nc"
            if "EGS" in config["electricity"]["extendable_carriers"]["Generator"]
            else []
        ),
        profile_egs=(
            DATA + "EGS/{interconnect}/profile_EGS.nc"
            if "EGS" in config["electricity"]["extendable_carriers"]["Generator"]
            else []
        ),
    output:
        RESOURCES + "{interconnect}/elec_base_network_l_pp.pkl",
    log:
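For reference, the new specs_egs and profile_egs inputs above are only requested when "EGS" is listed under electricity.extendable_carriers.Generator; otherwise they resolve to an empty list and add_electricity runs without them. A minimal sketch of that gating in plain Python (the config dictionary and file paths below are illustrative, not the project's actual values):

```python
# Illustrative config only; the real config file is much larger.
config = {"electricity": {"extendable_carriers": {"Generator": ["solar", "onwind", "EGS"]}}}

egs_enabled = "EGS" in config["electricity"]["extendable_carriers"]["Generator"]

# Mirrors the rule inputs: a concrete path when EGS is enabled, an empty list
# otherwise, so Snakemake simply omits the files from the rule.
specs_egs = "data/EGS/western/specs_EGS.nc" if egs_enabled else []
profile_egs = "data/EGS/western/profile_EGS.nc" if egs_enabled else []

print(specs_egs, profile_egs)
```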
4 changes: 2 additions & 2 deletions workflow/rules/build_sector.smk
@@ -144,7 +144,7 @@ rule build_simplified_population_layouts:
    output:
        clustered_pop_layout=RESOURCES + "{interconnect}/pop_layout_elec_s.csv",
    resources:
-        mem_mb=10000,
+        mem_mb=50000,
    log:
        LOGS + "{interconnect}/build_simplified_population_layouts",
    benchmark:
benchmark:
Expand Down Expand Up @@ -174,7 +174,7 @@ rule build_clustered_population_layouts:
LOGS
+ "{interconnect}/build_clustered_population_layouts_{simpl}_{clusters}.log",
resources:
mem_mb=10000,
mem_mb=50000,
benchmark:
(
BENCHMARKS
16 changes: 8 additions & 8 deletions workflow/rules/common.smk
@@ -86,7 +86,7 @@ def solver_threads(w):


def memory(w):
-    factor = 4.0
+    factor = 10.0
    for o in w.opts.split("-"):
        m = re.match(r"^(\d+)h$", o, re.IGNORECASE)
        if m is not None:
@@ -119,25 +119,25 @@ def interconnect_mem(w):


def interconnect_mem_a(w):
-    mem = 15000 * len(config_provider("scenario", "planning_horizons")(w))
+    mem = 30000 * len(config_provider("scenario", "planning_horizons")(w))
    if w.interconnect == "usa":
        return int(mem * 4)
    elif w.interconnect == "eastern":
-        return int(mem * 1.5)
+        return int(mem * 3)
    elif w.interconnect == "western":
-        return int(mem)
+        return int(mem * 2)
    elif w.interconnect == "texas":
        return int(mem * 0.5)


def interconnect_mem_s(w):
-    mem = 15000 * len(config_provider("scenario", "planning_horizons")(w))
+    mem = 30000 * len(config_provider("scenario", "planning_horizons")(w))
    if w.interconnect == "usa":
        return int(mem * 4)
    elif w.interconnect == "eastern":
        return int(mem * 3)
    elif w.interconnect == "western":
-        return int(mem)
+        return int(mem * 2)
    elif w.interconnect == "texas":
        return int(mem * 0.5)

@@ -149,7 +149,7 @@ def interconnect_mem_c(w):
    elif w.interconnect == "eastern":
        return int(mem * 3)
    elif w.interconnect == "western":
-        return int(mem) * 2
+        return int(mem * 2)
    elif w.interconnect == "texas":
        return int(mem * 0.75)

@@ -161,7 +161,7 @@ def interconnect_mem_prepare(w):
    elif w.interconnect == "eastern":
        return int(mem * 3)
    elif w.interconnect == "western":
-        return int(mem) * 2
+        return int(mem * 2)
    elif w.interconnect == "texas":
        return int(mem * 0.75)

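As a worked example of the revised memory heuristic, the sketch below restates interconnect_mem_a with plain arguments; the real function takes a Snakemake wildcards object and reads the planning horizons through config_provider.

```python
# Standalone restatement of the updated interconnect_mem_a heuristic (values from the diff above).
def interconnect_mem_a(interconnect: str, n_planning_horizons: int) -> int:
    """Requested memory in MB: 30 GB per planning horizon, scaled per interconnect."""
    mem = 30000 * n_planning_horizons
    factors = {"usa": 4, "eastern": 3, "western": 2, "texas": 0.5}
    return int(mem * factors[interconnect])

# A single-horizon western run now requests 60 GB (previously 15000 * 1 = 15 GB).
print(interconnect_mem_a("western", 1))  # 60000
```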
23 changes: 22 additions & 1 deletion workflow/rules/retrieve.smk
@@ -45,6 +45,8 @@ rule retrieve_zenodo_databundles:
            DATA + "breakthrough_network/base_grid/{file}", file=breakthrough_datafiles
        ),
        expand(DATA + "{file}", file=pypsa_usa_datafiles),
    resources:
        mem_mb=5000,
    log:
        "logs/retrieve/retrieve_databundles.log",
    script:
@@ -65,6 +67,8 @@ rule retrieve_nrel_efs_data:
        efs_databundle,
    output:
        DATA + "nrel_efs/EFSLoadProfile_{efs_case}_{efs_speed}.csv",
    resources:
        mem_mb=5000,
    log:
        "logs/retrieve/retrieve_efs_{efs_case}_{efs_speed}.log",
    script:
@@ -236,6 +240,23 @@ rule retrieve_pudl:
    log:
        LOGS + "retrieve_pudl.log",
    resources:
-        mem_mb=1000,
+        mem_mb=5000,
    script:
        "../scripts/retrieve_pudl.py"


if "EGS" in config["electricity"]["extendable_carriers"]["Generator"]:

    rule retrieve_egs:
        params:
            dispatch=config["renewable"]["EGS"]["dispatch"],
            subdir=DATA + "EGS/{interconnect}",
        output:
            DATA + "EGS/{interconnect}/specs_EGS.nc",
            DATA + "EGS/{interconnect}/profile_EGS.nc",
        resources:
            mem_mb=5000,
        log:
            LOGS + "retrieve_EGS_{interconnect}.log",
        script:
            "../scripts/retrieve_egs.py"
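The retrieve_egs.py script itself is not part of this diff. Inferred from how add_electricity.py reads the two files (see attach_egs below), the outputs appear to be NetCDF datasets keyed by substation and resource quality. A rough sketch of that layout, with placeholder names and numbers only:

```python
# Sketch of the data layout attach_egs() appears to expect from specs_EGS.nc and
# profile_EGS.nc; dimension and variable names are inferred from the consumer, and
# all values below are placeholders.
import numpy as np
import pandas as pd
import xarray as xr

sub_ids = ["35017", "35018"]   # illustrative substation ids
qualities = [1, 2, 3]          # resource-quality bins
dates = pd.date_range("2030-01-01", periods=24, freq="h")

specs = xr.Dataset(
    {
        "capex_usd_kw": (("sub_id", "Quality"), np.full((2, 3), 5000.0)),
        "advanced_capex_usd_kw": (("sub_id", "Quality"), np.full((2, 3), 3500.0)),
        "fixed_om": (("sub_id", "Quality"), np.full((2, 3), 100.0)),
        "avail_capacity_mw": (("sub_id", "Quality"), np.full((2, 3), 250.0)),
    },
    coords={"sub_id": sub_ids, "Quality": qualities},
)

profile = xr.Dataset(
    {
        "capacity_factor": (
            ("Quality", "year", "Date", "sub_id"),
            np.full((3, 1, len(dates), 2), 0.9),
        ),
    },
    coords={"Quality": qualities, "year": [2030], "Date": dates, "sub_id": sub_ids},
)

specs.to_netcdf("specs_EGS.nc")
profile.to_netcdf("profile_EGS.nc")
```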
2 changes: 1 addition & 1 deletion workflow/run_slurm.sh
@@ -1,3 +1,3 @@
# SLURM specifications made in default.cluster.yaml & the individual rules
# GRB_LICENSE_FILE=/share/software/user/restricted/gurobi/11.0.2/licenses/gurobi.lic
-snakemake --cluster "sbatch -A {cluster.account} --mail-type ALL --mail-user {cluster.email} -p {cluster.partition} -t {cluster.walltime} -o {cluster.output} -e {cluster.error} -c {threads} --mem {resources.mem_mb}" --cluster-config config/config.cluster.yaml --jobs 10 --latency-wait 60 --configfile config/base_paper/config.validation.yaml --rerun-incomplete -R build_powerplants
+snakemake --cluster "sbatch -A {cluster.account} --mail-type ALL --mail-user {cluster.email} -p {cluster.partition} -t {cluster.walltime} -o {cluster.output} -e {cluster.error} -c {threads} --mem {resources.mem_mb}" --cluster-config config/config.cluster.yaml --jobs 10 --latency-wait 60 --configfile config/config.egs_study.yaml
2 changes: 1 addition & 1 deletion workflow/scripts/_helpers.py
@@ -158,7 +158,7 @@ def calculate_annuity(n, r):

def load_costs(tech_costs: str) -> pd.DataFrame:
    df = pd.read_csv(tech_costs)
-    return df.pivot(index="technology", columns="parameter", values="value").fillna(0)
+    return df.pivot(index="pypsa-name", columns="parameter", values="value").fillna(0)


def load_network_for_plots(fn, tech_costs, config, combine_hydro_ps=True):
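A minimal illustration of the updated load_costs() behaviour: the costs CSV is now pivoted on a "pypsa-name" column instead of "technology". The rows and parameter names below are made up for the example.

```python
import io

import pandas as pd

csv = io.StringIO(
    "pypsa-name,parameter,value\n"
    "geothermal,investment,4500\n"
    "geothermal,FOM,100\n"
    "onwind,investment,1300\n"
)
df = pd.read_csv(csv)
costs = df.pivot(index="pypsa-name", columns="parameter", values="value").fillna(0)
print(costs.loc["geothermal", "investment"])  # 4500.0
```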
121 changes: 120 additions & 1 deletion workflow/scripts/add_electricity.py
@@ -46,8 +46,10 @@
import pypsa
import xarray as xr
from _helpers import (
    calculate_annuity,
    configure_logging,
    export_network_for_gis_mapping,
    load_costs,
    update_p_nom_max,
    weighted_avg,
)
@@ -508,7 +510,7 @@ def attach_wind_and_solar(
    """
    add_missing_carriers(n, carriers)
    for car in carriers:
-        if car == "hydro":
+        if car in ["hydro", "EGS"]:
            continue

        with xr.open_dataset(getattr(input_profiles, "profile_" + car)) as ds:
@@ -572,6 +574,114 @@
        )


def attach_egs(
    n: pypsa.Network,
    costs: pd.DataFrame,
    input_profiles: str,
    carriers: list[str],
    extendable_carriers: dict[str, list[str]],
    line_length_factor=1,
):
    """
    Attach EGS (Enhanced Geothermal Systems) capacity-factor profiles and
    annualized costs to the network.
    """
    car = "EGS"
    if car not in carriers:
        return

    add_missing_carriers(n, carriers)
    capital_recovery_period = 25  # Following EGS supply curves by Aljubran et al. (2024)
    discount_rate = 0.07  # load_costs(snakemake.input.tech_costs).loc["geothermal", "wacc_real"]
    drilling_cost = snakemake.config["renewable"]["EGS"]["drilling_cost"]

    with xr.open_dataset(
        getattr(input_profiles, "specs_egs"),
    ) as ds_specs, xr.open_dataset(
        getattr(input_profiles, "profile_egs"),
    ) as ds_profile:

        bus2sub = (
            pd.read_csv(input_profiles.bus2sub, dtype=str)
            .drop("interconnect", axis=1)
            .rename(columns={"Bus": "bus_id"})
        )

        # IGNORE: Remove dropna(). Rather, apply dropna when creating the original dataset
        df_specs = pd.merge(
            ds_specs.to_dataframe().reset_index().dropna(),
            bus2sub,
            on="sub_id",
            how="left",
        )
        df_specs["bus_id"] = df_specs["bus_id"].astype(str)

        # bus_id must be in the index for pypsa to read it
        df_specs.set_index("bus_id", inplace=True)

        # columns must be renamed to the quantities pypsa expects
        logger.info(f"Using {drilling_cost} EGS drilling costs.")
        df_specs = df_specs.rename(
            columns={
                "advanced_capex_usd_kw" if drilling_cost == "advanced" else "capex_usd_kw": "capital_cost",
                "avail_capacity_mw": "p_nom_max",
                "fixed_om": "fixed_om",
            },
        )

        # TODO: come up with proper values for these params

        df_specs["capital_cost"] = 1000 * (
            df_specs["capital_cost"] * calculate_annuity(capital_recovery_period, discount_rate)
            + df_specs["fixed_om"]
        )  # convert and annualize USD/kW to USD/MW-year
        df_specs["efficiency"] = 1.0

        df_specs = df_specs.loc[~(df_specs.index == "nan")]

        # TODO: review which resource qualities need to be included. Currently limited for speedup.
        qualities = [1]  # df_specs.Quality.unique()

        for q in qualities:
            suffix = " " + car  # + f" Q{q}"
            df_q = df_specs[df_specs["Quality"] == q]

            bus_list = df_q.index.values
            capital_cost = df_q["capital_cost"]
            p_nom_max_bus = df_q["p_nom_max"]
            efficiency = df_q["efficiency"]  # for now.

            # IGNORE: Remove dropna(). Rather, apply dropna when creating the original dataset
            df_q_profile = pd.merge(
                ds_profile.sel(Quality=q).to_dataframe().dropna().reset_index(),
                bus2sub,
                on="sub_id",
                how="left",
            )
            bus_profiles = pd.pivot_table(
                df_q_profile,
                columns="bus_id",
                index=["year", "Date"],
                values="capacity_factor",
            )

            logger.info(
                f"Adding EGS (Resource Quality-{q}) capacity-factor profiles to the network.",
            )

            n.madd(
                "Generator",
                bus_list,
                suffix,
                bus=bus_list,
                carrier=car,
                p_nom_extendable=car in extendable_carriers["Generator"],
                p_nom_max=p_nom_max_bus,
                capital_cost=capital_cost,
                efficiency=efficiency,
                p_max_pu=bus_profiles,
            )

def attach_battery_storage(
    n: pypsa.Network,
    costs: pd.DataFrame,
@@ -848,6 +958,15 @@ def main(snakemake):
    )
    plants = match_plant_to_bus(n, plants)

    attach_egs(
        n,
        costs,
        snakemake.input,
        renewable_carriers,
        extendable_carriers,
        params.length_factor,
    )

    attach_conventional_generators(
        n,
        costs,
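As a sanity check on the cost treatment in attach_egs(), the sketch below reproduces the USD/kW to USD/MW-year conversion, assuming calculate_annuity() returns the standard capital recovery factor r / (1 - (1 + r)^-n); the capex and fixed O&M figures are illustrative, not values from the EGS dataset.

```python
def calculate_annuity(n: int, r: float) -> float:
    """Capital recovery factor for lifetime n (years) at discount rate r (assumed form)."""
    return r / (1.0 - 1.0 / (1.0 + r) ** n) if r > 0 else 1.0 / n

capital_recovery_period = 25   # years, following Aljubran et al. (2024)
discount_rate = 0.07

capex_usd_kw = 5000.0          # illustrative overnight cost, USD/kW
fixed_om_usd_kw = 100.0        # illustrative fixed O&M, USD/kW-yr

crf = calculate_annuity(capital_recovery_period, discount_rate)  # roughly 0.0858
capital_cost = 1000 * (capex_usd_kw * crf + fixed_om_usd_kw)     # roughly 529,000 USD/MW-yr
print(round(crf, 4), round(capital_cost))
```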