Update the pre-commit hooks
peytondmurray committed Jun 3, 2023
1 parent 84f225f · commit ed0f74d
Showing 15 changed files with 112 additions and 131 deletions.
62 changes: 31 additions & 31 deletions .github/workflows/python-publish.yml
@@ -9,37 +9,37 @@
name: Release Package

on:
push:
tags:
- '*'
push:
tags:
- '*'

jobs:
release-tag:
name: "Tagged Release"
runs-on: "ubuntu-latest"
steps:
- name: Create a new GitHub release
uses: "marvinpinto/action-automatic-releases@latest"
with:
repo_token: "${{ secrets.GITHUB_TOKEN }}"
prerelease: false
release-tag:
name: 'Tagged Release'
runs-on: 'ubuntu-latest'
steps:
- name: Create a new GitHub release
uses: 'marvinpinto/action-automatic-releases@latest'
with:
repo_token: '${{ secrets.GITHUB_TOKEN }}'
prerelease: false

release-to-pypi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
release-to-pypi:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
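For context, the trigger above means this workflow runs whenever any tag is pushed, so a release is cut by tagging a commit and pushing that tag. A minimal sketch (the tag name is only an illustrative example, not a naming convention taken from this repository):

git tag v1.2.3
git push origin v1.2.3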
38 changes: 11 additions & 27 deletions .pre-commit-config.yaml
@@ -1,42 +1,26 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.0.1
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: check-ast
- repo: https://gitlab.com/pycqa/flake8
rev: 3.9.2
- repo: https://github.com/psf/black
rev: 23.3.0
hooks:
- id: flake8
additional_dependencies:
[
flake8-docstrings,
flake8-mutable,
flake8-debugger,
flake8-pytest-style,
pep8-naming,
flake8-simplify,
darglint,
]
- id: black
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.270
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v2.3.2
rev: v3.0.0-alpha.9-for-vscode
hooks:
- id: prettier
- repo: https://github.com/pre-commit/mirrors-mypy
rev: 'v0.910'
rev: 'v1.3.0'
hooks:
- id: mypy
- repo: https://github.com/pycqa/isort
rev: 5.9.3
hooks:
- id: isort
name: isort (python)
- id: isort
name: isort (cython)
types: [cython]
- id: isort
name: isort (pyi)
types: [pyi]
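After bumping hook versions like this, a quick sanity check is to re-run every hook against the whole repository with the pre-commit CLI. A minimal sketch, assuming pre-commit is already installed in the local environment:

pre-commit install
pre-commit run --all-files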
1 change: 0 additions & 1 deletion pyforc/core/__init__.py
@@ -1 +0,0 @@
from . import config, coordinates, forc, forcdata, ingester, ops, plot
2 changes: 1 addition & 1 deletion pyforc/core/config.py
@@ -40,7 +40,7 @@ class Config:

file_name: Optional[str] = None
step: Optional[float] = None
interpolation: str = 'cubic'
interpolation: str = "cubic"
drift_correction: bool = True
drift_kernel_size: int = 4
drift_density: int = 3
2 changes: 1 addition & 1 deletion pyforc/core/forc.py
@@ -5,7 +5,7 @@
from .ingester import IngesterBase


class Forc():
class Forc:
"""
Generic for storing, extracting, and processing FORC data.
27 changes: 14 additions & 13 deletions pyforc/core/forcdata.py
@@ -1,5 +1,6 @@
"""Containers for holding FORC data as arrays."""
from __future__ import annotations
from typing import Optional

import numpy as np

@@ -11,11 +12,11 @@ class ForcData:
Parameters
----------
h_raw : list[np.ndarray]
h_raw : Optional[list[np.ndarray]]
Raw h data
m_raw : list[np.ndarray]
m_raw : Optional[list[np.ndarray]]
Raw m data
t_raw : list[np.ndarray]
t_raw : Optional[list[np.ndarray]]
Raw t data
m_drift : np.ndarray
Values of the magnetization at the drift points.
@@ -33,9 +34,9 @@ class ForcData:

def __init__(
self,
h_raw: list[np.ndarray] = None,
m_raw: list[np.ndarray] = None,
t_raw: list[np.ndarray] = None,
h_raw: Optional[list[np.ndarray]] = None,
m_raw: Optional[list[np.ndarray]] = None,
t_raw: Optional[list[np.ndarray]] = None,
m_drift: np.ndarray = None,
h: np.ndarray = None,
hr: np.ndarray = None,
@@ -68,9 +69,9 @@ def get_step(self) -> float:
@staticmethod
def from_existing(
data: ForcData,
h_raw: list[np.ndarray] = None,
m_raw: list[np.ndarray] = None,
t_raw: list[np.ndarray] = None,
h_raw: Optional[list[np.ndarray]] = None,
m_raw: Optional[list[np.ndarray]] = None,
t_raw: Optional[list[np.ndarray]] = None,
m_drift: np.ndarray = None,
h: np.ndarray = None,
hr: np.ndarray = None,
@@ -84,11 +85,11 @@ def from_existing(
----------
data : ForcData
Data which is to be copied over to the new instance
h_raw : list[np.ndarray]
h_raw : Optional[list[np.ndarray]]
Raw magnetization data
m_raw : list[np.ndarray]
m_raw : Optional[list[np.ndarray]]
Raw magnetization data
t_raw : list[np.ndarray]
t_raw : Optional[list[np.ndarray]]
Raw temperature data
m_drift : np.ndarray
Drift magnetization measurements
@@ -190,7 +191,7 @@ def get_limits(
(x_min, x_max), (y_min, y_max)
"""
if mask:
data_mask = (self.h >= self.hr)
data_mask = self.h >= self.hr
h = self.h[data_mask].flatten()
hr = self.hr[data_mask].flatten()
else:
25 changes: 12 additions & 13 deletions pyforc/core/ingester.py
@@ -60,9 +60,9 @@ class PMCIngester(IngesterBase):
"""Ingester for data measured by Princeton Measurements Corporation (now Lakeshore) VSMs."""

pattern = (
r'(?P<h>([+-]\d+\.\d+(E[+-]\d+)?)),'
r'(?P<m>([+-]\d+\.\d+(E[+-]\d+)?))'
r'(,(?P<t>([+-]\d+\.\d+(E[+-]\d+)?)))?'
r"(?P<h>([+-]\d+\.\d+(E[+-]\d+)?)),"
r"(?P<m>([+-]\d+\.\d+(E[+-]\d+)?))"
r"(,(?P<t>([+-]\d+\.\d+(E[+-]\d+)?)))?"
)

def ingest(self) -> ForcData:
Expand All @@ -76,12 +76,12 @@ def ingest(self) -> ForcData:
if not self.config.file_name:
raise ValueError("No file name specified.")

with open(self.config.file_name, 'r') as f:
with open(self.config.file_name, "r") as f:
lines = f.readlines()

# Find first data line
i = 0
while i < len(lines) and lines[i][0] not in '-+':
while i < len(lines) and lines[i][0] not in "-+":
i += 1

header = lines[:i]
@@ -114,9 +114,9 @@ def ingest_curve(self, lines: list[str], i: int) -> tuple[np.ndarray, ...]:

if match:
groups = match.groupdict()
h_buf.append(float(groups['h']))
m_buf.append(float(groups['m']))
t_buf.append(float(groups['t']) if groups['t'] else np.nan)
h_buf.append(float(groups["h"]))
m_buf.append(float(groups["m"]))
t_buf.append(float(groups["t"]) if groups["t"] else np.nan)

else:
# End of the reversal curve
@@ -148,12 +148,11 @@ def ingest_from_hchb(self, lines: list[str], i: int) -> ForcData:
if match:
# Handle drift point
groups = match.groupdict()
m_drift.append(float(groups['m']))
m_drift.append(float(groups["m"]))

# Next line should be blank; line after is the start of the reversal curve data
if (
re.search(self.pattern, lines[i + 1])
or not re.search(self.pattern, lines[i + 2])
if re.search(self.pattern, lines[i + 1]) or not re.search(
self.pattern, lines[i + 2]
):
raise ValueError(f"Unexpected data format starting on line {i}")

@@ -228,4 +227,4 @@ def is_hhr(header: list[str]) -> bool:
bool
True if the header comes from a file measured across h/hr space, False otherwise.
"""
return not any(re.match('(Hc1|Hc2|Hb1|Hb2).*', line) for line in header)
return not any(re.match("(Hc1|Hc2|Hb1|Hb2).*", line) for line in header)
51 changes: 25 additions & 26 deletions pyforc/core/ops.py
@@ -52,10 +52,11 @@ def interpolate(
np.linspace(hr_min, hr_max, int((hr_max - hr_min) // step) + 1),
)

hhr_vals = np.concatenate(
(np.reshape(h_vals, (-1, 1)), np.reshape(hr_vals, (-1, 1))),
axis=1
),
hhr_vals = (
np.concatenate(
(np.reshape(h_vals, (-1, 1)), np.reshape(hr_vals, (-1, 1))), axis=1
),
)

m = si.griddata(hhr_vals, m_vals, (h, hr), method=config.interpolation)

@@ -117,15 +118,11 @@ def correct_drift(data: ForcData, config: Config) -> ForcData:
m_sat_avg = np.mean(data.m_drift)

m_sat_mov = decimate(
moving_average(data.m_drift, config.drift_kernel_size),
config.drift_density
)
index_mov = decimate(
np.arange(0, len(data.m_drift)),
config.drift_density
moving_average(data.m_drift, config.drift_kernel_size), config.drift_density
)
index_mov = decimate(np.arange(0, len(data.m_drift)), config.drift_density)

m_sat_interp = si.interp1d(index_mov, m_sat_mov, kind='cubic')
m_sat_interp = si.interp1d(index_mov, m_sat_mov, kind="cubic")

m_raw = []
for i, curve in enumerate(data.m_raw):
@@ -179,7 +176,7 @@ def moving_average(data: np.ndarray, kernel_size: int) -> np.ndarray:
return sn.convolve(
data,
np.ones(window_size) / window_size,
mode='nearest',
mode="nearest",
)


@@ -215,7 +212,8 @@ def normalize(data: ForcData, _) -> ForcData:
"""
return ForcData.from_existing(
data=data,
m=1 - 2 * (np.nanmax(data.m) - data.m) / (np.nanmax(data.m) - np.nanmin(data.m))
m=1
- 2 * (np.nanmax(data.m) - data.m) / (np.nanmax(data.m) - np.nanmin(data.m)),
)


@@ -244,19 +242,16 @@ def correct_slope(data: ForcData, config: Config) -> ForcData:
ForcData
Data with the background subtracted out.
"""
fit_region = (np.abs(data.h) > config.h_sat) & (~np.isnan(data.m)) & (data.h >= data.hr)
fit_region = (
(np.abs(data.h) > config.h_sat) & (~np.isnan(data.m)) & (data.h >= data.hr)
)

h = data.h[fit_region].flatten()
m = data.m[fit_region].flatten()

(a, b1, b2), _ = so.curve_fit(
generate_fit_func(h, config.h_sat), h, m
)
(a, b1, b2), _ = so.curve_fit(generate_fit_func(h, config.h_sat), h, m)

return ForcData.from_existing(
data=data,
m=data.m - line(data.h, a, 0)
)
return ForcData.from_existing(data=data, m=data.m - line(data.h, a, 0))


def generate_fit_func(
@@ -289,6 +284,7 @@ def fit_func(h: np.ndarray, a: float, b1: float, b2: float) -> np.ndarray:
y[i_upper_saturation] = line(h[i_upper_saturation], a, b1)
y[~i_upper_saturation] = line(h[~i_upper_saturation], a, b2)
return y

return fit_func


@@ -340,12 +336,13 @@ def compute_forc_distribution(data: ForcData, config: Config) -> ForcData:
step = data.get_step()
return ForcData.from_existing(
data=data,
rho=-0.5 * sn.convolve(
rho=-0.5
* sn.convolve(
input=data.m,
weights=compute_sg_kernel(config.smoothing_factor, step, step),
mode='constant',
mode="constant",
cval=np.nan,
)
),
)


@@ -370,12 +367,14 @@ def compute_sg_kernel(sf: int, step_x: float, step_y: float) -> np.ndarray:
"""
xx, yy = np.meshgrid(
np.linspace(sf * step_x, -sf * step_x, 2 * sf + 1),
np.linspace(sf * step_y, -sf * step_y, 2 * sf + 1)
np.linspace(sf * step_y, -sf * step_y, 2 * sf + 1),
)

xx = np.reshape(xx, (-1, 1))
yy = np.reshape(yy, (-1, 1))

coefficients = np.linalg.pinv(np.hstack((np.ones_like(xx), xx, xx ** 2, yy, yy ** 2, xx * yy)))
coefficients = np.linalg.pinv(
np.hstack((np.ones_like(xx), xx, xx**2, yy, yy**2, xx * yy))
)

return np.reshape(coefficients[5, :], (2 * sf + 1, 2 * sf + 1))