
Commit

Add usage to README and add tests with private data and fix a few bugs (#11)

* Minor fixes and tests

* add more tests

* usage docs

* fixup qpsi

* Apply suggestions from code review

Co-authored-by: Oliver Funk <oliverfunk@users.noreply.github.com>

* no suggestion

---------

Co-authored-by: Oliver Funk <oliverfunk@users.noreply.github.com>
je-cook and oliverfunk authored Jun 10, 2024
1 parent f78a0e5 commit e2fb1bb
Showing 10 changed files with 512 additions and 44 deletions.
5 changes: 4 additions & 1 deletion .github/workflows/main.yml
@@ -41,13 +41,16 @@ jobs:

- name: Checkout bluemira-private-data
uses: actions/checkout@v4
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
repository: "Fusion-Power-Plant-Framework/bluemira-private-data"
ssh-key: ${{ secrets.BLUEMIRA_PRIVATE_DATA_DEPLOY_KEY }}
path: "./tests/test_data/private"

- name: Run all tests with coverage
run: hatch run test:tests-cov
env:
PRIVATE: ${{ github.event.pull_request.head.repo.fork && 'tests-cov' || 'tests-cov-private' }}
run: hatch run test:${PRIVATE}

# docs:
# runs-on: ubuntu-latest
20 changes: 19 additions & 1 deletion README.md
@@ -2,7 +2,7 @@

[![Hatch project](https://img.shields.io/badge/%F0%9F%A5%9A-Hatch-4051b5.svg)](https://github.com/pypa/hatch)
[![linting - Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
[![Actions status](https://github.com/Fusion-Power-Plant-Framework/eqdsk/actions/workflows/push-main.yml/badge.svg)](https://github.com/Fusion-Power-Plant-Framework/eqdsk/actions)
[![Actions status](https://github.com/Fusion-Power-Plant-Framework/eqdsk/actions/workflows/main.yml/badge.svg)](https://github.com/Fusion-Power-Plant-Framework/eqdsk/actions)

An EQDSK reader and writer for GEQDSK (more soon), with COCOS identification and conversion.

@@ -25,3 +25,21 @@ pip install git+https://github.com/Fusion-Power-Plant-Framework/eqdsk.git
```

For a developer setup please see [CONTRIBUTING.md](CONTRIBUTING.md#setup-with-hatch)

## Basic Usage

To read an EQDSK file (JSON or G-EQDSK format) in its raw state, with no COCOS handling:
```python
from eqdsk import EQDSKInterface

EQDSKInterface.from_file('file.json', no_cocos=True)
```
To read an EQDSK file with a known COCOS convention and convert it to another convention:
```python
EQDSKInterface.from_file('file.eqdsk', from_cocos=11, to_cocos=17)
```
Alternatively, if the direction of phi (clockwise or anticlockwise) and the units of the poloidal flux psi (V.s or V.s/rad) are known, the COCOS convention will be identified for you:
```python
EQDSKInterface.from_file('file.eqdsk', clockwise_phi=True, volt_seconds_per_radian=True)
```
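
Since this commit also touches `as_cocos` and `write` in `eqdsk/file.py`, a natural follow-on to the examples above is converting an equilibrium and writing it back out. This is a minimal sketch, assuming the `write(file_path, file_format=...)` signature visible later in this diff; the file names are placeholders:
```python
from eqdsk import EQDSKInterface

# Read a COCOS-11 file, converting it to COCOS 17 on load.
eq = EQDSKInterface.from_file("file.eqdsk", from_cocos=11, to_cocos=17)

# Write the converted equilibrium back out as JSON.
eq.write("file_cocos17", file_format="json")
```
The same conversion is available on an already-loaded interface via `as_cocos(cocos_index)`, also changed in this commit.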
16 changes: 4 additions & 12 deletions eqdsk/cocos.py
@@ -275,16 +275,12 @@ def identify_eqdsk(
A list of the identified COCOS definitions.
"""
if eqdsk.qpsi is None:
eqdsk_warn(
"WARNING: qpsi is not defined in the eqdsk file. Setting to 1"
)
eqdsk_warn("WARNING: qpsi is not defined in the eqdsk file. Setting to 1")
eqdsk.qpsi = np.array([1])

cw_phi_l = [True, False] if clockwise_phi is None else [clockwise_phi]
vs_pr_l = (
[True, False]
if volt_seconds_per_radian is None
else [volt_seconds_per_radian]
[True, False] if volt_seconds_per_radian is None else [volt_seconds_per_radian]
)

definitions = [
@@ -349,9 +345,7 @@ def identify_cocos(

sign_Ip = Sign(np.sign(plasma_current))
sign_B0 = Sign(np.sign(b_toroidal))
sign_psi_inc_towards_boundary = Sign(
np.sign(psi_at_boundary - psi_at_mag_axis)
)
sign_psi_inc_towards_boundary = Sign(np.sign(psi_at_boundary - psi_at_mag_axis))

sign_q = np.sign(q_psi)
if sign_q.min() != sign_q.max():
@@ -371,9 +365,7 @@
)


def transform_cocos(
from_cocos_index: int, to_cocos_index: int
) -> COCOSTransform:
def transform_cocos(from_cocos_index: int, to_cocos_index: int) -> COCOSTransform:
"""Return the transformation needed to transform from one COCOS
to another.
"""
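
The `identify_cocos` hunk above is essentially sign bookkeeping: the convention is chosen from the signs of the plasma current, the toroidal field, the change in psi towards the boundary, and the safety factor profile. A minimal standalone sketch of that logic follows; the function name is mine, and the `Sign` enum plus the actual COCOS lookup live in `eqdsk.cocos` and are not reproduced here:
```python
import numpy as np


def cocos_signs(plasma_current, b_toroidal, psi_at_mag_axis, psi_at_boundary, q_psi):
    """Collect the signs that drive COCOS identification (illustrative only)."""
    sign_ip = np.sign(plasma_current)
    sign_b0 = np.sign(b_toroidal)
    # Does psi increase from the magnetic axis out to the plasma boundary?
    sign_psi_inc = np.sign(psi_at_boundary - psi_at_mag_axis)

    sign_q = np.sign(np.asarray(q_psi))
    if sign_q.min() != sign_q.max():
        # Mirrors the guard in identify_cocos: a q profile that changes sign
        # cannot be attributed to a single convention.
        raise ValueError("qpsi changes sign across the profile")

    return sign_ip, sign_b0, sign_psi_inc, sign_q.max()
```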
52 changes: 34 additions & 18 deletions eqdsk/file.py
@@ -241,9 +241,7 @@ def identify(
)

if as_cocos_index is not None:
matching_conv = [
c for c in conventions if c.index == as_cocos_index
]
matching_conv = [c for c in conventions if c.index == as_cocos_index]
if not matching_conv:
raise ValueError(
f"No convention found that matches "
@@ -256,7 +254,7 @@ def identify(
if len(conventions) != 1:
eqdsk_warn(
f"A single COCOS could not be determined, "
f"found conventions ({', '.join([str(c.index) for c in conventions])}) " # noqa: E501
f"found conventions ({', '.join([str(c.index) for c in conventions])}) "
f"for the EQDSK file. Choosing COCOS {conv.index}.",
)
eqdsk_print(f"EQDSK identified as COCOS {conv.index}.")
@@ -266,7 +264,7 @@ def as_cocos(self, cocos_index: int) -> EQDSKInterface:
"""Return a copy of this eqdsk converted to the given COCOS."""
if self.cocos.index == cocos_index:
return self
eqdsk_print(f"Converting EQDSK to COCOS {cocos_index}")
eqdsk_print(f"Converting EQDSK to COCOS {cocos_index}.")
return convert_eqdsk(self, cocos_index)

def to_dict(self) -> dict:
@@ -299,6 +297,10 @@ def write(
json_kwargs = {} if json_kwargs is None else json_kwargs
json_writer(self.to_dict(), file_path, **json_kwargs)
elif file_format in {"eqdsk", "geqdsk"}:
eqdsk_warn(
"You are in the 21st century. "
"Are you sure you want to be making an EDQSK in this day and age?"
)
_write_eqdsk(file_path, self.to_dict())

def update(self, eqdsk_data: dict[str, Any]):
@@ -328,9 +330,18 @@ def update(self, eqdsk_data: dict[str, Any]):
def _read_json(file_path: Path) -> dict[str, Any]:
with file_path.open() as file:
data = json.load(file)
for k, value in data.items():
if isinstance(value, list):
data[k] = np.asarray(value)

for k, value in data.items():
if isinstance(value, list):
data[k] = np.asarray(value)

# For backward compatibility where 'psinorm' was sometimes 'pnorm'
if "pnorm" in data:
if "psinorm" in data:
del data["pnorm"]
else:
data["psinorm"] = data.pop("pnorm")

return data


@@ -513,7 +524,7 @@ def _write_eqdsk(file_path: str | Path, data: dict):
"""
file_path = Path(file_path)
if file_path.suffix not in EQDSK_EXTENSIONS:
file_path = Path(file_path).with_suffix("").with_suffix(".eqdsk")
file_path = file_path.with_suffix(".eqdsk")

with Path(file_path).open("w") as file:

@@ -540,13 +551,11 @@ def write_header(
Empty strings will be recorded as 0.
"""
line = [id_string]
line += [data[v] if not v else 0 for v in var_list]
line += [data[v] if v else 0 for v in var_list]
file.write(fortran_format.write(line))
file.write("\n")

def write_line(
fortran_format: ff.FortranRecordWriter, var_list: list[str]
):
def write_line(fortran_format: ff.FortranRecordWriter, var_list: list[str]):
"""Write a line of variable values out to a G-EQDSK file.
Parameters
@@ -559,13 +568,11 @@ def write_line(
variables to be added to the current line.
Empty strings will be recorded as 0.
"""
line = [data[v] if not v else 0 for v in var_list]
line = [data[v] if v else 0 for v in var_list]
file.write(fortran_format.write(line))
file.write("\n")

def write_array(
fortran_format: ff.FortranRecordWriter, array: np.ndarray
):
def write_array(fortran_format: ff.FortranRecordWriter, array: np.ndarray):
"""Write a numpy array out to a G-EQDSK file.
Parameters
@@ -592,7 +599,16 @@ def write_array(
file_id_string = f"{trimmed_name}_{timestamp}"

# Define dummy data for qpsi if it has not been previously defined.
qpsi = np.zeros(data["nx"]) if data["qpsi"] is None else data["qpsi"]
qpsi = (
np.ones(data["nx"]) if data["qpsi"] is None else np.atleast_1d(data["qpsi"])
)

if len(qpsi) == 1:
qpsi = np.full(data["nx"], qpsi)
elif len(qpsi) != data["nx"]:
raise ValueError(
"the length of qpsi should be 1 or the number of x grid points"
)

# Create array containing coilset information.
coil = np.zeros(5 * data["ncoil"])
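
The qpsi handling added to `_write_eqdsk` above is the "fixup qpsi" item from the commit message: a missing q profile is now padded with ones instead of zeros, a length-1 profile is broadcast across the radial grid, and any other length mismatch is rejected. The same behaviour stated in isolation, with an illustrative grid size:
```python
import numpy as np

nx = 65                                # number of radial grid points, illustrative
qpsi_in = None                         # e.g. a file that carried no q profile

qpsi = np.ones(nx) if qpsi_in is None else np.atleast_1d(qpsi_in)
if len(qpsi) == 1:
    qpsi = np.full(nx, qpsi[0])        # broadcast a single value over the grid
elif len(qpsi) != nx:
    raise ValueError("the length of qpsi should be 1 or the number of x grid points")

assert qpsi.shape == (nx,)
```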
9 changes: 7 additions & 2 deletions eqdsk/tools.py
@@ -4,6 +4,7 @@
"""Eqdsk tools"""

from json import JSONEncoder, dumps
from pathlib import Path

import numpy as np
import numpy.typing as npt
@@ -25,7 +26,7 @@ def default(self, obj):

def json_writer(
data: dict,
file: str | None = None,
file: str | Path | None = None,
*,
return_output: bool = False,
cls=NumpyJSONEncoder,
@@ -57,6 +58,10 @@ def json_writer(
the_json = dumps(data, cls=cls, **kwargs)

if file is not None:
file = Path(file)
if file.suffix != ".json":
file = file.with_suffix(".json")

with open(file, "w") as fh:
fh.write(the_json)
fh.write("\n")
@@ -79,7 +84,7 @@ def is_num(thing) -> bool:
num:
Whether or not the input is a number
"""
if thing is True or thing is False:
if thing in {True, False}:
return False
try:
thing = floatify(thing)
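
With the path handling added above, `json_writer` appends a missing `.json` suffix itself, so callers can pass a bare stem. A short usage sketch; the data values are made up and the import path simply mirrors `eqdsk/tools.py`:
```python
import numpy as np

from eqdsk.tools import json_writer

# NumPy arrays are serialised through the default NumpyJSONEncoder, and the
# missing ".json" suffix is added by json_writer, producing "equilibrium.json".
json_writer({"nx": 3, "psi": np.linspace(0.0, 1.0, 3)}, file="equilibrium")
```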
8 changes: 6 additions & 2 deletions pyproject.toml
@@ -60,6 +60,7 @@ python = ["3.10", "3.11", "3.12"]
[tool.hatch.envs.test.scripts]
tests = "pytest {args:tests}"
tests-cov = "pytest --cov eqdsk --cov-report html:htmlcov_eqdsk --cov-report xml {args:tests}"
tests-cov-private = "pytest --private --cov eqdsk --cov-report html:htmlcov --cov-report xml {args:tests}"

# env: docs
[tool.hatch.envs.docs]
@@ -80,7 +81,7 @@ all = ["style", "typing"]
# tool: ruff
[tool.ruff]
target-version = "py310"
line-length = 80
line-length = 89
exclude = [
".git",
"__pycache__",
@@ -117,7 +118,6 @@ ignore = [
"PTH123", # use Path.open
"TRY003", # put error messages in error class
"FURB152", # 3.14 != pi
"PIE790", # TMP for false positive ...
]

[tool.ruff.lint.isort]
@@ -141,6 +141,9 @@ convention = "numpy"
"D104",
"ERA001",
"PLR2004",
"PLR0912",
"PLR0914",
"N802",
"S101",
"TID252",
]
@@ -168,3 +171,4 @@ addopts = "--html=report.html --self-contained-html --strict-markers -r f
log_cli = true
log_cli_format = "%(asctime)s [%(levelname)8s] %(message)s"
log_cli_level = "INFO"
markers = ["private: Tests using private data"]
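
The new `tests-cov-private` script and the `private` marker imply a `--private` pytest option; the conftest change that registers it is among the changed files not rendered above. A minimal sketch of how such a hook is typically wired up, offered as an assumption rather than the commit's actual conftest:
```python
# conftest.py -- illustrative sketch only; the real hook added in this commit
# is in one of the files not shown in this excerpt.
import pytest


def pytest_addoption(parser):
    """Register the --private flag used by the tests-cov-private script."""
    parser.addoption(
        "--private",
        action="store_true",
        default=False,
        help="run tests that need the bluemira-private-data checkout",
    )


def pytest_collection_modifyitems(config, items):
    """Skip tests marked 'private' unless --private was given."""
    if config.getoption("--private"):
        return
    skip_private = pytest.mark.skip(reason="needs --private and the private test data")
    for item in items:
        if "private" in item.keywords:
            item.add_marker(skip_private)
```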
3 changes: 3 additions & 0 deletions tests/__init__.py
@@ -0,0 +1,3 @@
# SPDX-FileCopyrightText: 2023-present The Bluemira Developers <https://github.com/Fusion-Power-Plant-Framework/bluemira>
#
# SPDX-License-Identifier: LGPL-2.1-or-later
