fix pre-commit #240

Merged 2 commits on Oct 16, 2024

.pre-commit-config.yaml (16 changes: 9 additions & 7 deletions)

@@ -30,16 +30,18 @@ repos:
rev: v2.3.0
hooks:
- id: codespell
- args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore,vor'] # Ignore capital case words, e.g. country codes
+ args: ['--ignore-regex="(\b[A-Z]+\b)"', '--ignore-words-list=fom,appartment,bage,ore,setis,tabacco,berfore,vor,pris'] # Ignore capital case words, e.g. country codes
types_or: [python, rst, markdown]
files: ^(scripts|doc)/

- # Make docstrings PEP 257 compliant
- - repo: https://github.com/PyCQA/docformatter
- rev: v1.7.5
- hooks:
- - id: docformatter
- args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]
+ # Make docstrings PEP 257 compliant
+ # Broken for pre-commit<=4.0.0
+ # See https://github.com/PyCQA/docformatter/issues/293
+ # - repo: https://github.com/PyCQA/docformatter
+ # rev: v1.7.5
+ # hooks:
+ # - id: docformatter
+ # args: ["--in-place", "--make-summary-multi-line", "--pre-summary-newline"]

- repo: https://github.com/keewis/blackdoc
rev: v0.3.9
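
A side note on the codespell arguments above: --ignore-regex tells codespell to skip any token matched by the pattern, and (\b[A-Z]+\b) matches runs of capital letters such as country codes. A minimal Python illustration of what the pattern matches (the sample sentence is made up):

# Illustration of the codespell --ignore-regex pattern used above.
# Runs of capital letters (e.g. country codes like DE or AT) are matched
# and therefore excluded from spell checking; mixed-case words are not.
import re

ignore_pattern = re.compile(r"\b[A-Z]+\b")

sample = "Capacities for DE and AT use the FOM assumptions."  # made-up sample text
print(ignore_pattern.findall(sample))  # ['DE', 'AT', 'FOM']
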

workflow/Snakefile (11 changes: 6 additions & 5 deletions)

@@ -631,11 +631,12 @@ rule plot_ariadne_report:
regions_onshore_clustered=expand(
resources("regions_onshore_base_s_{clusters}.geojson"),
clusters=config["scenario"]["clusters"],
- allow_missing=True
+ allow_missing=True,
),
rc="matplotlibrc",
output:
- elec_price_duration_curve=RESULTS + "ariadne/report/elec_price_duration_curve.png",
+ elec_price_duration_curve=RESULTS
+ + "ariadne/report/elec_price_duration_curve.png",
elec_price_duration_hist=RESULTS + "ariadne/report/elec_price_duration_hist.png",
results=directory(RESULTS + "ariadne/report"),
elec_transmission=directory(RESULTS + "ariadne/report/elec_transmission"),
@@ -646,14 +647,14 @@ rule plot_ariadne_report:
resources:
mem_mb=10000,
log:
- RESULTS
- + "logs/plot_ariadne_report.log",
+ RESULTS + "logs/plot_ariadne_report.log",
script:
"scripts/plot_ariadne_report.py"


rule ariadne_report_only:
input:
expand(
RESULTS + "ariadne/report/elec_price_duration_curve.png",
run=config_provider("run", "name"),
),
),
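
For reference on the rule input above: Snakemake's expand() raises on wildcards it cannot fill unless allow_missing=True is passed, in which case unfilled wildcards are left as literal placeholders. A small sketch under that assumption; the path pattern and cluster values are made up, but mirror the clusters/run wildcards used in the Snakefile:

# Sketch of expand(..., allow_missing=True); the pattern below is illustrative only.
from snakemake.io import expand

paths = expand(
    "results/{run}/regions_onshore_base_s_{clusters}.geojson",
    clusters=[27, 49],
    allow_missing=True,  # {run} is not supplied, so it is kept as a placeholder
)
print(paths)
# ['results/{run}/regions_onshore_base_s_27.geojson',
#  'results/{run}/regions_onshore_base_s_49.geojson']
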

workflow/scripts/export_ariadne_variables.py (48 changes: 18 additions & 30 deletions)

@@ -1552,7 +1551,6 @@ def get_secondary_energy(n, region, _industry_demand):
total_oil_fuel_usage * oil_fractions["Efuel"]
)


try:
methanol_production = (
n.statistics.supply(bus_carrier="methanol", **kwargs)
@@ -1572,7 +1571,6 @@ def get_secondary_energy(n, region, _industry_demand):
except KeyError:
var["Secondary Energy|Methanol"] = 0


var["Secondary Energy|Liquids|Hydrogen"] += var["Secondary Energy|Methanol"]

var["Secondary Energy|Liquids|Biomass"] = (
@@ -2143,7 +2141,6 @@ def get_final_energy(
+ var["Final Energy|Transportation|Methanol"]
)


# var["Final Energy|Transportation|Liquids|Biomass"] = \
# var["Final Energy|Transportation|Liquids|Synthetic Fossil"] = \
var["Final Energy|Transportation|Liquids|Petroleum"] = (
@@ -2184,9 +2181,9 @@ def get_final_energy(
var["Final Energy|Bunkers|Navigation|Liquids"] * oil_fractions[fraction_key]
)

var["Final Energy|Bunkers|Navigation"] = (
var["Final Energy|Bunkers|Navigation|Liquids"]
)
var["Final Energy|Bunkers|Navigation"] = var[
"Final Energy|Bunkers|Navigation|Liquids"
]

# var["Final Energy|Bunkers|Navigation|Gases"] = \
# ! Not implemented
@@ -3915,53 +3912,46 @@ def get_export_import_links(n, region, carriers):
DE_bio_fraction = 0
else:
DE_bio_fraction = (
- DE_renewable_oil.filter(like="bio").sum()
- / DE_renewable_oil.sum()
+ DE_renewable_oil.filter(like="bio").sum() / DE_renewable_oil.sum()
)

if EU_renewable_oil.sum() == 0:
EU_bio_fraction = 0
else:
EU_bio_fraction = (
- EU_renewable_oil.filter(like="bio").sum()
- / EU_renewable_oil.sum()
+ EU_renewable_oil.filter(like="bio").sum() / EU_renewable_oil.sum()
)

exports_oil_renew, imports_oil_renew = get_export_import_links(
n, region, ["renewable oil", "methanol"]
)

var["Trade|Secondary Energy|Liquids|Biomass|Volume"] = (
- exports_oil_renew * DE_bio_fraction
- - imports_oil_renew * EU_bio_fraction
+ exports_oil_renew * DE_bio_fraction - imports_oil_renew * EU_bio_fraction
) * MWh2PJ

var["Trade|Secondary Energy|Liquids|Biomass|Gross Import|Volume"] = (
imports_oil_renew * EU_bio_fraction * MWh2PJ
)

- exports_meoh, imports_meoh = get_export_import_links(
- n, region, ["methanol"]
- )
+ exports_meoh, imports_meoh = get_export_import_links(n, region, ["methanol"])

var["Trade|Secondary Energy|Liquids|Hydrogen|Volume"] = (
exports_oil_renew * (1 - DE_bio_fraction)
- imports_oil_renew * (1 - EU_bio_fraction)
- + exports_meoh - imports_meoh
+ + exports_meoh
+ - imports_meoh
) * MWh2PJ

var["Trade|Secondary Energy|Liquids|Hydrogen|Gross Import|Volume"] = (
- imports_oil_renew * (1 - EU_bio_fraction)
- + imports_meoh
+ imports_oil_renew * (1 - EU_bio_fraction) + imports_meoh
) * MWh2PJ

var["Trade|Secondary Energy|Methanol|Volume"] = (
exports_meoh - imports_meoh
) * MWh2PJ

var["Trade|Secondary Energy|Methanol|Gross Import|Volume"] = (
imports_meoh * MWh2PJ
)
var["Trade|Secondary Energy|Methanol|Gross Import|Volume"] = imports_meoh * MWh2PJ

# Trade|Secondary Energy|Gases|Hydrogen|Volume

@@ -4328,9 +4318,7 @@ def get_operational_and_capital_costs(year):

def hack_DC_projects(n, model_year):
logger.info(f"Hacking DC projects for year {model_year}")
- logger.warning(
- f"Assuming all indices of DC projects start with 'DC' or 'TYNDP'"
- )
+ logger.warning(f"Assuming all indices of DC projects start with 'DC' or 'TYNDP'")
tprojs = n.links.loc[
(n.links.index.str.startswith("DC") | n.links.index.str.startswith("TYNDP"))
& ~n.links.reversed
@@ -4370,7 +4358,7 @@ def hack_DC_projects(n, model_year):
# Past projects should have their p_nom_opt bigger or equal to p_nom
if model_year <= 2035:
assert (
- n.links.loc[past_projects, "p_nom_opt"] + 0.1 # numerical error tolerance
+ n.links.loc[past_projects, "p_nom_opt"] + 0.1  # numerical error tolerance
>= n.links.loc[past_projects, "p_nom"]
).all()
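
A standalone illustration of the tolerance check in the assert above, with made-up capacities: optimised capacities may undershoot the nominal value by a tiny solver error, so 0.1 MW of slack is added before comparing.

# Made-up numbers; only the comparison pattern matches the assert above.
import pandas as pd

p_nom = pd.Series({"DC1": 2000.0, "DC2": 1400.0})
p_nom_opt = pd.Series({"DC1": 1999.95, "DC2": 1400.0})  # small numerical undershoot on DC1

assert ((p_nom_opt + 0.1) >= p_nom).all()  # passes thanks to the 0.1 tolerance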

@@ -4381,10 +4369,8 @@ def hack_AC_projects(n, n_start, model_year):
logger.info(f"Hacking AC projects for year {model_year}")

# All transmission projects have build_year > 0, this is implicit in the query
- ac_projs = n.lines.query(
- "@model_year - 5 < build_year <= @model_year"
- ).index
+ ac_projs = n.lines.query("@model_year - 5 < build_year <= @model_year").index

s_nom_start = n_start.lines.loc[ac_projs, "s_nom"]

# Eventhough the lines are available to the model from the start,
@@ -4396,11 +4382,13 @@

return n


def hack_transmission_projects(n, n_start, model_year):
n = hack_DC_projects(n, model_year)
n = hack_AC_projects(n, n_start, model_year)
return n


def get_ariadne_var(
n,
industry_demand,
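
The trade variables in the hunk above all follow the same pattern: net volume equals exports minus imports, converted from MWh to PJ, with renewable-oil flows split between biomass- and hydrogen-based fuels via the bio fractions. A hedged sketch of that pattern; the helper name and numbers are made up, and only the conversion factor is taken as given (1 MWh = 3.6e-6 PJ, matching the script's MWh2PJ constant):

# Sketch of the net-trade pattern behind the Trade|Secondary Energy|* variables.
# net_trade_pj() is a made-up helper; the real script works directly on the
# export/import totals returned by get_export_import_links().
MWH2PJ = 3.6e-6  # 1 MWh = 3.6 GJ = 3.6e-6 PJ


def net_trade_pj(exports_mwh, imports_mwh, export_share=1.0, import_share=1.0):
    """Net traded volume in PJ, positive for net exports."""
    return (exports_mwh * export_share - imports_mwh * import_share) * MWH2PJ


# e.g. the biomass share of renewable-oil trade, with made-up numbers:
print(net_trade_pj(5.0e6, 2.0e6, export_share=0.4, import_share=0.3))  # 5.04 PJ
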

workflow/scripts/modify_prenetwork.py (2 changes: 1 addition & 1 deletion)

@@ -1134,7 +1134,7 @@ def drop_duplicate_transmission_projects(n):
n.mremove("Line", to_drop)

# This is a hot fix until the lines get properly removed in pypsa-eur
manual = ["TYNDP2020_1", "TYNDP2020_2", "TYNDP2020_23"] # DC3, DC4, DC1
manual = ["TYNDP2020_1", "TYNDP2020_2", "TYNDP2020_23"] # DC3, DC4, DC1
for line in manual:
if line in n.lines.index:
n.remove("Line", line)